#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Create and manage the database objects."""

#    Copyright 2008-2011, Ray E. Barker
#
#    This program is free software; you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation; either version 2 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License
#    along with this program; if not, write to the Free Software
#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

import L10n
_ = L10n.get_translation()
########################################################################

# TODO:  - rebuild indexes / vacuum option
#        - check speed of get_stats_from_hand() - add log info
#        - check size of db, seems big? (mysql)
#        - investigate size of mysql db (200K for just 7K hands? 2GB for 140K hands?)

# postmaster -D /var/lib/pgsql/data
# Standard Library modules
import os
import sys
import re
import math
import logging
import traceback
from datetime import datetime, date, time, timedelta
from time import time, strftime, sleep
from decimal_wrapper import Decimal
# logging has been set up in fpdb.py or HUD_main.py, use their settings:
log = logging.getLogger("db")
# FreePokerTools modules
import SQL
import Charset
from Exceptions import *
# Other library modules
try:
    import sqlalchemy.pool as pool
    use_pool = True
except ImportError:
    log.info(_("Not using sqlalchemy connection pool."))
    use_pool = False

try:
    from numpy import var
    use_numpy = True
except ImportError:
    log.info(_("Not using numpy to define variance in sqlite."))
    use_numpy = False
# Variance created as sqlite has a bunch of undefined aggregate functions.
class VARIANCE:
    def __init__(self):
        self.store = []

    def step(self, value):
        self.store.append(value)

    def finalize(self):
        return float(var(self.store))
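
# A minimal sketch of how sqlite3 consumes this aggregate (illustrative only -
# the real registration happens in Database.connect() below): sqlite3 calls
# step() once per row and finalize() once at the end of the group.
#
#   import sqlite3
#   conn = sqlite3.connect(':memory:')
#   conn.create_aggregate("variance", 1, VARIANCE)
#   conn.execute("create table t(x real)")
#   conn.executemany("insert into t values (?)", [(1,), (2,), (3,)])
#   print conn.execute("select variance(x) from t").fetchone()[0]   # -> 0.666...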
class sqlitemath:
    def mod(self, a, b):
        return a % b


class Database:

    MYSQL_INNODB = 2
    PGSQL = 3
    SQLITE = 4

    hero_hudstart_def = '1999-12-31'      # default for length of Hero's stats in HUD
    villain_hudstart_def = '1999-12-31'   # default for length of Villain's stats in HUD
    # Data Structures for index and foreign key creation
    # drop_code is an int with possible values:  0 - don't drop for bulk import
    #                                            1 - drop during bulk import
    #
    # - note that mysql automatically creates indexes on constrained columns when
    #   foreign keys are created, while postgres does not. Hence the much longer list
    #   of indexes is required for postgres.
    # all primary keys are left on all the time
    #
    #             table     column           drop_code
    indexes = [
        []   # no db with index 0
      , []   # no db with index 1
      , [ # indexes for mysql (list index 2) (foreign keys not here, in next data structure)
          #  {'tab':'Players',  'col':'name',          'drop':0}  unique indexes not dropped
          #  {'tab':'Hands',    'col':'siteHandNo',    'drop':0}  unique indexes not dropped
          #, {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0}  unique indexes not dropped
        ]
      , [ # indexes for postgres (list index 3)
          {'tab':'Gametypes',       'col':'siteId',            'drop':0}
        , {'tab':'Hands',           'col':'gametypeId',        'drop':0} # mct 22/3/09
        , {'tab':'Hands',           'col':'fileId',            'drop':0} # mct 22/3/09
        #, {'tab':'Hands',          'col':'siteHandNo',        'drop':0}  unique indexes not dropped
        , {'tab':'HandsActions',    'col':'handId',            'drop':1}
        , {'tab':'HandsActions',    'col':'playerId',          'drop':1}
        , {'tab':'HandsActions',    'col':'actionId',          'drop':1}
        , {'tab':'Boards',          'col':'handId',            'drop':1}
        , {'tab':'HandsPlayers',    'col':'handId',            'drop':1}
        , {'tab':'HandsPlayers',    'col':'playerId',          'drop':1}
        , {'tab':'HandsPlayers',    'col':'tourneysPlayersId', 'drop':0}
        , {'tab':'HudCache',        'col':'gametypeId',        'drop':1}
        , {'tab':'HudCache',        'col':'playerId',          'drop':0}
        , {'tab':'HudCache',        'col':'tourneyTypeId',     'drop':0}
        , {'tab':'SessionsCache',   'col':'gametypeId',        'drop':1}
        , {'tab':'SessionsCache',   'col':'playerId',          'drop':0}
        , {'tab':'SessionsCache',   'col':'tourneyTypeId',     'drop':0}
        , {'tab':'Players',         'col':'siteId',            'drop':1}
        #, {'tab':'Players',        'col':'name',              'drop':0}  unique indexes not dropped
        , {'tab':'Tourneys',        'col':'tourneyTypeId',     'drop':1}
        #, {'tab':'Tourneys',       'col':'siteTourneyNo',     'drop':0}  unique indexes not dropped
        , {'tab':'TourneysPlayers', 'col':'playerId',          'drop':0}
        #, {'tab':'TourneysPlayers','col':'tourneyId',         'drop':0}  unique indexes not dropped
        , {'tab':'TourneyTypes',    'col':'siteId',            'drop':0}
        , {'tab':'Backings',        'col':'tourneysPlayersId', 'drop':0}
        , {'tab':'Backings',        'col':'playerId',          'drop':0}
        , {'tab':'RawHands',        'col':'id',                'drop':0}
        , {'tab':'RawTourneys',     'col':'id',                'drop':0}
        ]
      , [ # indexes for sqlite (list index 4)
          {'tab':'Hands',           'col':'gametypeId',        'drop':0}
        , {'tab':'Hands',           'col':'fileId',            'drop':0}
        , {'tab':'Boards',          'col':'handId',            'drop':0}
        , {'tab':'HandsPlayers',    'col':'handId',            'drop':0}
        , {'tab':'HandsPlayers',    'col':'playerId',          'drop':0}
        , {'tab':'HandsPlayers',    'col':'tourneysPlayersId', 'drop':0}
        , {'tab':'HandsActions',    'col':'handId',            'drop':0}
        , {'tab':'HandsActions',    'col':'playerId',          'drop':0}
        , {'tab':'HandsActions',    'col':'actionId',          'drop':1}
        , {'tab':'HudCache',        'col':'gametypeId',        'drop':1}
        , {'tab':'HudCache',        'col':'playerId',          'drop':0}
        , {'tab':'HudCache',        'col':'tourneyTypeId',     'drop':0}
        , {'tab':'SessionsCache',   'col':'gametypeId',        'drop':1}
        , {'tab':'SessionsCache',   'col':'playerId',          'drop':0}
        , {'tab':'SessionsCache',   'col':'tourneyTypeId',     'drop':0}
        , {'tab':'Players',         'col':'siteId',            'drop':1}
        , {'tab':'Tourneys',        'col':'tourneyTypeId',     'drop':1}
        , {'tab':'TourneysPlayers', 'col':'playerId',          'drop':0}
        , {'tab':'TourneyTypes',    'col':'siteId',            'drop':0}
        , {'tab':'Backings',        'col':'tourneysPlayersId', 'drop':0}
        , {'tab':'Backings',        'col':'playerId',          'drop':0}
        , {'tab':'RawHands',        'col':'id',                'drop':0}
        , {'tab':'RawTourneys',     'col':'id',                'drop':0}
        ]
      ]
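
    # Example of how the drop flag is consumed (illustrative): during bulk
    # import, prepareBulkImport() below walks this list and drops only the
    # entries marked drop:1, e.g. for postgres it issues
    #   drop index if exists HandsPlayers_handId_idx
    # and afterBulkImport() recreates them with
    #   create index HandsPlayers_handId_idx on HandsPlayers(handId)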
    foreignKeys = [
        []   # no db with index 0
      , []   # no db with index 1
      , [ # foreign keys for mysql (index 2)
          {'fktab':'Hands',         'fkcol':'gametypeId',        'rtab':'Gametypes',       'rcol':'id', 'drop':1}
        , {'fktab':'Hands',         'fkcol':'fileId',            'rtab':'Files',           'rcol':'id', 'drop':1}
        , {'fktab':'Boards',        'fkcol':'handId',            'rtab':'Hands',           'rcol':'id', 'drop':1}
        , {'fktab':'HandsPlayers',  'fkcol':'handId',            'rtab':'Hands',           'rcol':'id', 'drop':1}
        , {'fktab':'HandsPlayers',  'fkcol':'playerId',          'rtab':'Players',         'rcol':'id', 'drop':1}
        , {'fktab':'HandsPlayers',  'fkcol':'tourneysPlayersId', 'rtab':'TourneysPlayers', 'rcol':'id', 'drop':1}
        , {'fktab':'HandsActions',  'fkcol':'handId',            'rtab':'Hands',           'rcol':'id', 'drop':1}
        , {'fktab':'HandsActions',  'fkcol':'playerId',          'rtab':'Players',         'rcol':'id', 'drop':1}
        , {'fktab':'HandsActions',  'fkcol':'actionId',          'rtab':'Actions',         'rcol':'id', 'drop':1}
        , {'fktab':'HudCache',      'fkcol':'gametypeId',        'rtab':'Gametypes',       'rcol':'id', 'drop':1}
        , {'fktab':'HudCache',      'fkcol':'playerId',          'rtab':'Players',         'rcol':'id', 'drop':0}
        , {'fktab':'HudCache',      'fkcol':'tourneyTypeId',     'rtab':'TourneyTypes',    'rcol':'id', 'drop':1}
        , {'fktab':'SessionsCache', 'fkcol':'gametypeId',        'rtab':'Gametypes',       'rcol':'id', 'drop':1}
        , {'fktab':'SessionsCache', 'fkcol':'playerId',          'rtab':'Players',         'rcol':'id', 'drop':0}
        , {'fktab':'SessionsCache', 'fkcol':'tourneyTypeId',     'rtab':'TourneyTypes',    'rcol':'id', 'drop':1}
        ]
      , [ # foreign keys for postgres (index 3)
          {'fktab':'Hands',         'fkcol':'gametypeId',        'rtab':'Gametypes',       'rcol':'id', 'drop':1}
        , {'fktab':'Hands',         'fkcol':'fileId',            'rtab':'Files',           'rcol':'id', 'drop':1}
        , {'fktab':'Boards',        'fkcol':'handId',            'rtab':'Hands',           'rcol':'id', 'drop':1}
        , {'fktab':'HandsPlayers',  'fkcol':'handId',            'rtab':'Hands',           'rcol':'id', 'drop':1}
        , {'fktab':'HandsPlayers',  'fkcol':'playerId',          'rtab':'Players',         'rcol':'id', 'drop':1}
        , {'fktab':'HandsActions',  'fkcol':'handId',            'rtab':'Hands',           'rcol':'id', 'drop':1}
        , {'fktab':'HandsActions',  'fkcol':'playerId',          'rtab':'Players',         'rcol':'id', 'drop':1}
        , {'fktab':'HandsActions',  'fkcol':'actionId',          'rtab':'Actions',         'rcol':'id', 'drop':1}
        , {'fktab':'HudCache',      'fkcol':'gametypeId',        'rtab':'Gametypes',       'rcol':'id', 'drop':1}
        , {'fktab':'HudCache',      'fkcol':'playerId',          'rtab':'Players',         'rcol':'id', 'drop':0}
        , {'fktab':'HudCache',      'fkcol':'tourneyTypeId',     'rtab':'TourneyTypes',    'rcol':'id', 'drop':1}
        , {'fktab':'SessionsCache', 'fkcol':'gametypeId',        'rtab':'Gametypes',       'rcol':'id', 'drop':1}
        , {'fktab':'SessionsCache', 'fkcol':'playerId',          'rtab':'Players',         'rcol':'id', 'drop':0}
        , {'fktab':'SessionsCache', 'fkcol':'tourneyTypeId',     'rtab':'TourneyTypes',    'rcol':'id', 'drop':1}
        ]
      , [ # no foreign keys in sqlite (index 4)
        ]
      ]
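
    # Illustrative DDL generated from one of these entries by createAllForeignKeys()
    # below - the constraint name is always <fktab>_<fkcol>_fkey, e.g.:
    #   alter table HandsPlayers add constraint HandsPlayers_handId_fkey
    #       foreign key (handId) references Hands(id)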
223 # "FOREIGN KEY (handId) REFERENCES Hands(id)" - requires index on Hands.id
224 # - creates index handId on <thistable>.handId
225 # alter table t drop foreign key fk
226 # alter table t add foreign key (fkcol) references tab(rcol)
227 # alter table t add constraint c foreign key (fkcol) references tab(rcol)
228 # (fkcol is used for foreigh key name)
230 # mysql to list indexes: (CG - "LIST INDEXES" should work too)
231 # SELECT table_name, index_name, non_unique, column_name
232 # FROM INFORMATION_SCHEMA.STATISTICS
233 # WHERE table_name = 'tbl_name'
234 # AND table_schema = 'db_name'
235 # ORDER BY table_name, index_name, seq_in_index
237 # ALTER TABLE Tourneys ADD INDEX siteTourneyNo(siteTourneyNo)
238 # ALTER TABLE tab DROP INDEX idx
241 # SELECT constraint_name, table_name, column_name, referenced_table_name, referenced_column_name
242 # FROM information_schema.KEY_COLUMN_USAGE
243 # WHERE REFERENCED_TABLE_SCHEMA = (your schema name here)
244 # AND REFERENCED_TABLE_NAME is not null
245 # ORDER BY TABLE_NAME, COLUMN_NAME;
247 # this may indicate missing object
248 # _mysql_exceptions.OperationalError: (1025, "Error on rename of '.\\fpdb\\hands' to '.\\fpdb\\#sql2-7f0-1b' (errno: 152)")
253 # To add a foreign key constraint to a table:
254 # ALTER TABLE tab ADD CONSTRAINT c FOREIGN KEY (col) REFERENCES t2(col2) MATCH FULL;
255 # ALTER TABLE tab DROP CONSTRAINT zipchk
257 # Note: index names must be unique across a schema
258 # CREATE INDEX idx ON tab(col)
260 # SELECT * FROM PG_INDEXES
265 # create index indexname on tablename (col);
    def __init__(self, c, sql = None, autoconnect = True):
        #log = Configuration.get_logger("logging.conf", "db", log_dir=c.dir_log)
        log.debug(_("Creating Database instance, sql = %s") % sql)
        self.config = c
        self.__connected = False
        self.wrongDbVersion = False
        self.settings = {}
        self.settings['os'] = "linuxmac" if os.name != "nt" else "windows"
        db_params = c.get_db_parameters()
        self.import_options = c.get_import_parameters()
        self.backend = db_params['db-backend']
        self.db_server = db_params['db-server']
        self.database = db_params['db-databaseName']
        self.host = db_params['db-host']

        gen = c.get_general_params()
        self.day_start = 0
        self._has_lock = False

        if 'day_start' in gen:
            self.day_start = float(gen['day_start'])

        self.sessionTimeout = float(self.import_options['sessionTimeout'])

        # where possible avoid creating new SQL instance by using the global one passed in
        if sql is None:
            self.sql = SQL.Sql(db_server = self.db_server)
        else:
            self.sql = sql

        if autoconnect:
            # connect to db
            self.do_connect(c)

            if self.backend == self.PGSQL:
                from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT, ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_SERIALIZABLE
                #ISOLATION_LEVEL_AUTOCOMMIT     = 0
                #ISOLATION_LEVEL_READ_COMMITTED = 1
                #ISOLATION_LEVEL_SERIALIZABLE   = 2

        if self.backend == self.SQLITE and self.database == ':memory:' and self.wrongDbVersion and self.is_connected():
            log.info("sqlite/:memory: - creating")
            self.recreate_tables()
            self.wrongDbVersion = False

        self.gtcache = None     # GameTypeId cache
        self.ttcache = None     # TourneyTypeId cache
        self.tcache = None      # TourneyId cache
        self.pcache = None      # PlayerId cache
        self.tpcache = None     # TourneysPlayersId cache
        self.cachemiss = 0      # Delete me later - using to count player cache misses
        self.cachehit = 0       # Delete me later - using to count player cache hits

        # config while trying out new hudcache mechanism
        self.use_date_in_hudcache = True

        #self.hud_hero_style = 'T'   # Duplicate set of vars just for hero - not used yet.
        #self.hud_hero_hands = 2000  # Idea is that you might want all-time stats for others
        #self.hud_hero_days = 30     # but last T days or last H hands for yourself

        # vars for hand ids or dates fetched according to above config:
        self.hand_1day_ago = 0              # max hand id more than 24 hrs earlier than now
        self.date_ndays_ago = 'd000000'     # date N days ago ('d' + YYMMDD)
        self.h_date_ndays_ago = 'd000000'   # date N days ago ('d' + YYMMDD) for hero
        self.date_nhands_ago = {}           # dates N hands ago per player - not used yet

        self.saveActions = False if self.import_options['saveActions'] == False else True

        if self.is_connected():
            if not self.wrongDbVersion:
                self.get_sites()
            self.connection.rollback()  # make sure any locks taken so far are released
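
    # Typical construction (a sketch - Configuration is fpdb's config module,
    # imported elsewhere in the program):
    #   config = Configuration.Config()
    #   db = Database(config)
    #   if db.is_connected() and not db.wrongDbVersion:
    #       print "connected to", db.get_backend_name()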
    def dumpDatabase(self):
        result = "fpdb database dump\nDB version=" + str(DB_VERSION) + "\n\n"

        tables = self.cursor.execute(self.sql.query['list_tables'])
        tables = self.cursor.fetchall()
        for table in (u'Actions', u'Autorates', u'Backings', u'Gametypes', u'Hands', u'Boards'
                     , u'HandsActions', u'HandsPlayers', u'Files', u'HudCache', u'SessionsCache'
                     , u'Players', u'RawHands', u'RawTourneys', u'Settings', u'Sites'
                     , u'TourneyTypes', u'Tourneys', u'TourneysPlayers'):
            print "table:", table
            result += "###################\nTable " + table + "\n###################\n"
            rows = self.cursor.execute(self.sql.query['get' + table])
            rows = self.cursor.fetchall()
            columnNames = self.cursor.description
            if not rows:
                result += "empty table\n"
            else:
                for row in rows:
                    for columnNumber in range(len(columnNames)):
                        if columnNames[columnNumber][0] == "importTime":
                            result += ("  " + columnNames[columnNumber][0] + "=ignore\n")
                        elif columnNames[columnNumber][0] == "styleKey":
                            result += ("  " + columnNames[columnNumber][0] + "=ignore\n")
                        else:
                            result += ("  " + columnNames[columnNumber][0] + "=" + str(row[columnNumber]) + "\n")
            result += "\n"
        return result
    #end def dumpDatabase
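
    # Usage sketch: the dump is returned as one string, so a caller can write
    # it straight to a file for debugging or regression tests:
    #   dump = db.dumpDatabase()
    #   open("fpdb-dump.txt", "w").write(dump)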
    # could be used by hud to change hud style
    def set_hud_style(self, style):
        self.hud_style = style
    def do_connect(self, c):
        if c is None:
            raise FpdbError('Configuration not defined')

        db = c.get_db_parameters()
        try:
            self.connect(backend=db['db-backend'],
                         host=db['db-host'],
                         database=db['db-databaseName'],
                         user=db['db-user'],
                         password=db['db-password'])
        except:
            # error during connect
            self.__connected = False
            raise

        db_params = c.get_db_parameters()
        self.import_options = c.get_import_parameters()
        self.backend = db_params['db-backend']
        self.db_server = db_params['db-server']
        self.database = db_params['db-databaseName']
        self.host = db_params['db-host']
    def connect(self, backend=None, host=None, database=None,
                user=None, password=None, create=False):
        """Connects a database with the given parameters"""
        if backend is None:
            raise FpdbError('Database backend not defined')
        self.backend = backend
        self.host = host
        self.user = user
        self.password = password
        self.database = database
        self.connection = None
        self.cursor = None
        if backend == Database.MYSQL_INNODB:
            import MySQLdb
            if use_pool:
                MySQLdb = pool.manage(MySQLdb, pool_size=5)
            try:
                self.connection = MySQLdb.connect(host=host
                                                 ,user=user
                                                 ,passwd=password
                                                 ,db=database
                                                 ,use_unicode=True)
                self.__connected = True
            #TODO: Add port option
            except MySQLdb.Error, ex:
                if ex.args[0] == 1045:
                    raise FpdbMySQLAccessDenied(ex.args[0], ex.args[1])
                elif ex.args[0] == 2002 or ex.args[0] == 2003: # 2002 is no unix socket, 2003 is no tcp socket
                    raise FpdbMySQLNoDatabase(ex.args[0], ex.args[1])
                else:
                    print _("*** WARNING UNKNOWN MYSQL ERROR:"), ex
        elif backend == Database.PGSQL:
            import psycopg2
            import psycopg2.extensions
            if use_pool:
                psycopg2 = pool.manage(psycopg2, pool_size=5)
            psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
            # If DB connection is made over TCP, then the variables
            # host, user and password are required
            # For local domain-socket connections, only DB name is
            # needed, and everything else is in fact undefined and/or
            # irrelevant
            # sqlcoder: This database only connect failed in my windows setup??
            # Modifed it to try the 4 parameter style if the first connect fails - does this work everywhere?
            self.__connected = False
            if self.host == "localhost" or self.host == "127.0.0.1":
                try:
                    self.connection = psycopg2.connect(database = database)
                    self.__connected = True
                except:
                    # direct connection failed so try user/pass/... version
                    pass
            if not self.is_connected():
                try:
                    self.connection = psycopg2.connect(host = host,
                                                       user = user,
                                                       password = password,
                                                       database = database)
                    self.__connected = True
                except Exception, ex:
                    if 'Connection refused' in ex.args[0]:
                        # meaning eg. db not running
                        raise FpdbPostgresqlNoDatabase(errmsg = ex.args[0])
                    elif 'password authentication' in ex.args[0]:
                        raise FpdbPostgresqlAccessDenied(errmsg = ex.args[0])
                    else:
                        raise
        elif backend == Database.SQLITE:
            import sqlite3
            if use_pool:
                sqlite3 = pool.manage(sqlite3, pool_size=1)
            #else:
            #    log.warning("SQLite won't work well without 'sqlalchemy' installed.")

            if database != ":memory:":
                if not os.path.isdir(self.config.dir_database) and create:
                    print _("Creating directory: '%s'") % (self.config.dir_database)
                    log.info(_("Creating directory: '%s'") % (self.config.dir_database))
                    os.mkdir(self.config.dir_database)
                database = os.path.join(self.config.dir_database, database)
            self.db_path = database
            log.info(_("Connecting to SQLite: %s") % self.db_path)
            if os.path.exists(database) or create:
                self.connection = sqlite3.connect(self.db_path, detect_types=sqlite3.PARSE_DECLTYPES)
                self.__connected = True
                sqlite3.register_converter("bool", lambda x: bool(int(x)))
                sqlite3.register_adapter(bool, lambda x: 1 if x else 0)
                self.connection.create_function("floor", 1, math.floor)
                tmp = sqlitemath()
                self.connection.create_function("mod", 2, tmp.mod)
                if use_numpy:
                    self.connection.create_aggregate("variance", 1, VARIANCE)
                else:
                    log.warning(_("Some database functions will not work without NumPy support"))
                self.cursor = self.connection.cursor()
                self.cursor.execute('PRAGMA temp_store=2')      # use memory for temp tables/indexes
                self.cursor.execute('PRAGMA journal_mode=WAL')  # write-ahead logging, fewer writer locks
                self.cursor.execute('PRAGMA synchronous=0')     # don't wait for file writes to finish
            else:
                raise FpdbError("sqlite database " + database + " does not exist")
        else:
            raise FpdbError("unrecognised database backend:" + str(backend))
        if self.is_connected():
            self.cursor = self.connection.cursor()
            self.cursor.execute(self.sql.query['set tx level'])
            self.check_version(database=database, create=create)

    def get_sites(self):
        self.cursor.execute("SELECT name,id FROM Sites")
        sites = self.cursor.fetchall()
        self.config.set_site_ids(sites)
    def check_version(self, database, create):
        self.wrongDbVersion = False
        try:
            self.cursor.execute("SELECT * FROM Settings")
            settings = self.cursor.fetchone()
            if settings[0] != DB_VERSION:
                log.error((_("Outdated or too new database version (%s).") % (settings[0])) + " " + _("Please recreate tables."))
                self.wrongDbVersion = True
        except: # _mysql_exceptions.ProgrammingError:
            if database != ":memory:":
                if create:
                    #print (_("Failed to read settings table.") + " - " + _("Recreating tables."))
                    log.info(_("Failed to read settings table.") + " - " + _("Recreating tables."))
                    self.recreate_tables()
                    self.check_version(database=database, create=False)
                else:
                    #print (_("Failed to read settings table.") + " - " + _("Please recreate tables."))
                    log.info(_("Failed to read settings table.") + " - " + _("Please recreate tables."))
                    self.wrongDbVersion = True
            else:
                self.wrongDbVersion = True
    #end def check_version
    def commit(self):
        if self.backend != self.SQLITE:
            self.connection.commit()
        else:
            # sqlite commits can fail because of shared locks on the database (SQLITE_BUSY)
            # re-try commit if it fails in case this happened
            maxtimes = 5   # retry count/pause are assumed values; the structure is from the original
            pause = 1
            ok = False
            for i in xrange(maxtimes):
                try:
                    ret = self.connection.commit()
                    #log.debug(_("commit finished ok, i = ")+str(i))
                    ok = True
                except:
                    log.debug(_("commit %s failed: info=%s value=%s") % (str(i), str(sys.exc_info()), str(sys.exc_value)))
                    sleep(pause)
                if ok:
                    break
            if not ok:
                log.debug(_("commit failed"))
                raise FpdbError('sqlite commit failed')
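
    # Note on the retry loop above: with a second connection holding a write
    # lock, sqlite3 raises "OperationalError: database is locked" (SQLITE_BUSY)
    # from commit(); each failure is logged and retried after sleep(pause)
    # before finally giving up with FpdbError.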
    def rollback(self):
        self.connection.rollback()

    def connected(self):
        """ now deprecated, use is_connected() instead """
        return self.__connected
    def is_connected(self):
        return self.__connected

    def get_cursor(self):
        return self.connection.cursor()

    def close_connection(self):
        self.connection.close()
    def disconnect(self, due_to_error=False):
        """Disconnects the DB (rolls back if param is true, otherwise commits)"""
        if due_to_error:
            self.connection.rollback()
        else:
            self.connection.commit()
        self.cursor.close()
        self.connection.close()
        self.__connected = False
    def reconnect(self, due_to_error=False):
        """Reconnects the DB"""
        #print "started reconnect"
        self.disconnect(due_to_error)
        self.connect(self.backend, self.host, self.database, self.user, self.password)
    def get_backend_name(self):
        """Returns the name of the currently used backend"""
        if self.backend == 2:
            return "MySQL InnoDB"
        elif self.backend == 3:
            return "PostgreSQL"
        elif self.backend == 4:
            return "SQLite"
        else:
            raise FpdbError("invalid backend")

    def get_db_info(self):
        return (self.host, self.database, self.user, self.password)
    def get_table_name(self, hand_id):
        c = self.connection.cursor()
        c.execute(self.sql.query['get_table_name'], (hand_id, ))
        row = c.fetchone()
        return row

    def get_table_info(self, hand_id):
        c = self.connection.cursor()
        c.execute(self.sql.query['get_table_name'], (hand_id, ))
        row = c.fetchone()
        l = list(row)
        if row[3] == "ring":   # cash game
            l.append(None)
            l.append(None)
            return l
        else:                  # tournament
            tour_no, tab_no = re.split(" ", row[0])
            l.append(tour_no)
            l.append(tab_no)
            return l
    def get_last_hand(self):
        c = self.connection.cursor()
        c.execute(self.sql.query['get_last_hand'])
        row = c.fetchone()
        return row[0]
    def get_xml(self, hand_id):
        c = self.connection.cursor()
        c.execute(self.sql.query['get_xml'], (hand_id,))
        row = c.fetchone()
        return row[0]
    def get_recent_hands(self, last_hand):
        c = self.connection.cursor()
        c.execute(self.sql.query['get_recent_hands'], {'last_hand': last_hand})
        return c.fetchall()
    def get_hand_info(self, new_hand_id):
        c = self.connection.cursor()
        c.execute(self.sql.query['get_hand_info'], new_hand_id)
        return c.fetchall()
    def getHandCount(self):
        c = self.connection.cursor()
        c.execute(self.sql.query['getHandCount'])
        return c.fetchone()[0]
    #end def getHandCount

    def getTourneyCount(self):
        c = self.connection.cursor()
        c.execute(self.sql.query['getTourneyCount'])
        return c.fetchone()[0]
    #end def getTourneyCount

    def getTourneyTypeCount(self):
        c = self.connection.cursor()
        c.execute(self.sql.query['getTourneyTypeCount'])
        return c.fetchone()[0]
    #end def getTourneyTypeCount
    def getSiteTourneyNos(self, site):
        c = self.connection.cursor()
        # FIXME: Take site and actually fetch siteId from that
        # Fixed to Winamax atm
        q = self.sql.query['getSiteTourneyNos']
        q = q.replace('%s', self.sql.query['placeholder'])
        c.execute(q, (15,))   # 15 = Winamax site id, see fillDefaultData()
        alist = []
        for row in c.fetchall():
            alist.append(row)
        return alist
    def get_actual_seat(self, hand_id, name):
        c = self.connection.cursor()
        c.execute(self.sql.query['get_actual_seat'], (hand_id, name))
        row = c.fetchone()
        return row[0]
    def get_cards(self, hand):
        """Get and return the cards for each player in the hand."""
        cards = {} # dict of cards, the key is the seat number,
                   # the value is a tuple of the players cards
                   # example: {1: (0, 0, 20, 21, 22, 0 , 0)}
        c = self.connection.cursor()
        c.execute(self.sql.query['get_cards'], [hand])
        for row in c.fetchall():
            cards[row[0]] = row[1:]
        return cards
    def get_common_cards(self, hand):
        """Get and return the community cards for the specified hand."""
        cards = {}
        c = self.connection.cursor()
        c.execute(self.sql.query['get_common_cards'], [hand])
        cards['common'] = c.fetchone()
        return cards
    def get_action_from_hand(self, hand_no):
        action = [ [], [], [], [], [] ]
        c = self.connection.cursor()
        c.execute(self.sql.query['get_action_from_hand'], (hand_no,))
        for row in c.fetchall():
            street = row[0]
            act = row[1:]
            action[street].append(act)
        return action
    def get_winners_from_hand(self, hand):
        """Returns a hash of winners:amount won, given a hand number."""
        winners = {}
        c = self.connection.cursor()
        c.execute(self.sql.query['get_winners_from_hand'], (hand,))
        for row in c.fetchall():
            winners[row[0]] = row[1]
        return winners
    def init_hud_stat_vars(self, hud_days, h_hud_days):
        """Initialise variables used by Hud to fetch stats:
           self.hand_1day_ago     handId of latest hand played more than a day ago
           self.date_ndays_ago    date n days ago
           self.h_date_ndays_ago  date n days ago for hero (different n)
        """
        self.hand_1day_ago = 1
        c = self.get_cursor()
        row = None
        try:
            c.execute(self.sql.query['get_hand_1day_ago'])
            row = c.fetchone()
        except: # TODO: what error is a database error?!
            err = traceback.extract_tb(sys.exc_info()[2])[-1]
            print _("*** Database Error: ") + err[2] + "(" + str(err[1]) + "): " + str(sys.exc_info()[1])
        if row and row[0]:
            self.hand_1day_ago = int(row[0])

        tz = datetime.utcnow() - datetime.today()
        tz_offset = tz.seconds / 3600
        tz_day_start_offset = self.day_start + tz_offset

        d = timedelta(days=hud_days, hours=tz_day_start_offset)
        now = datetime.utcnow() - d
        self.date_ndays_ago = "d%02d%02d%02d" % (now.year - 2000, now.month, now.day)
        log.debug("hud_days = %s" % hud_days)
        log.debug("self.date_ndays_ago = %s" % (self.date_ndays_ago,))

        d = timedelta(days=h_hud_days, hours=tz_day_start_offset)
        now = datetime.utcnow() - d
        self.h_date_ndays_ago = "d%02d%02d%02d" % (now.year - 2000, now.month, now.day)
        log.debug("h_hud_days = %s" % h_hud_days)
        log.debug("self.h_date_ndays_ago = %s" % (self.h_date_ndays_ago,))
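
    # Example (illustrative): with day_start = 0, tz_offset = 0 and
    # hud_days = 30, a call on 2011-06-15 UTC sets
    #   self.date_ndays_ago = 'd110516'
    # which is compared lexically against HudCache.styleKey values of the
    # form 'd' + YYMMDD in the aggregated stats queries.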
    def init_player_hud_stat_vars(self, playerid):
        # not sure if this is workable, to be continued ...
        try:
            # self.date_nhands_ago is used for fetching stats for last n hands (hud_style = 'H')
            # This option not used yet - needs to be called for each player :-(
            self.date_nhands_ago[str(playerid)] = 'd000000'

            # should use aggregated version of query if appropriate
            c = self.get_cursor()
            c.execute(self.sql.query['get_date_nhands_ago'], (self.hud_hands, playerid))
            row = c.fetchone()
            if row and row[0]:
                self.date_nhands_ago[str(playerid)] = row[0]
            print _("Database: n hands ago the date was:") + " " + self.date_nhands_ago[str(playerid)] + " (playerid "+str(playerid)+")"
        except:
            err = traceback.extract_tb(sys.exc_info()[2])[-1]
            print _("*** Database Error: ")+err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
    # is get_stats_from_hand slow?
    def get_stats_from_hand( self, hand, type   # type is "ring" or "tour"
                           , hud_params = {'hud_style':'A', 'agg_bb_mult':1000
                                          ,'seats_style':'A', 'seats_cust_nums':['n/a', 'n/a', (2,2), (3,4), (3,5), (4,6), (5,7), (6,8), (7,9), (8,10), (8,10)]
                                          ,'h_hud_style':'S', 'h_agg_bb_mult':1000
                                          ,'h_seats_style':'A', 'h_seats_cust_nums':['n/a', 'n/a', (2,2), (3,4), (3,5), (4,6), (5,7), (6,8), (7,9), (8,10), (8,10)]
                                          }
                           , hero_id = -1
                           , num_seats = 6
                           ):
        hud_style = hud_params['hud_style']
        agg_bb_mult = hud_params['agg_bb_mult']
        seats_style = hud_params['seats_style']
        seats_cust_nums = hud_params['seats_cust_nums']
        h_hud_style = hud_params['h_hud_style']
        h_agg_bb_mult = hud_params['h_agg_bb_mult']
        h_seats_style = hud_params['h_seats_style']
        h_seats_cust_nums = hud_params['h_seats_cust_nums']

        stat_dict = {}

        if seats_style == 'A':
            seats_min, seats_max = 0, 10
        elif seats_style == 'C':
            seats_min, seats_max = seats_cust_nums[num_seats][0], seats_cust_nums[num_seats][1]
        elif seats_style == 'E':
            seats_min, seats_max = num_seats, num_seats
        else:
            seats_min, seats_max = 0, 10
            print "bad seats_style value:", seats_style

        if h_seats_style == 'A':
            h_seats_min, h_seats_max = 0, 10
        elif h_seats_style == 'C':
            h_seats_min, h_seats_max = h_seats_cust_nums[num_seats][0], h_seats_cust_nums[num_seats][1]
        elif h_seats_style == 'E':
            h_seats_min, h_seats_max = num_seats, num_seats
        else:
            h_seats_min, h_seats_max = 0, 10
            print "bad h_seats_style value:", h_seats_style
        log.info("opp seats style %s %d %d hero seats style %s %d %d"
                 % (seats_style, seats_min, seats_max
                   ,h_seats_style, h_seats_min, h_seats_max) )

        if hud_style == 'S' or h_hud_style == 'S':
            self.get_stats_from_hand_session(hand, stat_dict, hero_id
                                            ,hud_style, seats_min, seats_max
                                            ,h_hud_style, h_seats_min, h_seats_max)

            if hud_style == 'S' and h_hud_style == 'S':
                return stat_dict

        if hud_style == 'T':
            stylekey = self.date_ndays_ago
        elif hud_style == 'A':
            stylekey = '0000000'  # all stylekey values should be higher than this
        elif hud_style == 'S':
            stylekey = 'zzzzzzz'  # all stylekey values should be lower than this
        else:
            stylekey = '0000000'
            log.info('hud_style: %s' % hud_style)

        #elif hud_style == 'H':
        #    stylekey = date_nhands_ago  needs array by player here ...

        if h_hud_style == 'T':
            h_stylekey = self.h_date_ndays_ago
        elif h_hud_style == 'A':
            h_stylekey = '0000000'  # all stylekey values should be higher than this
        elif h_hud_style == 'S':
            h_stylekey = 'zzzzzzz'  # all stylekey values should be lower than this
        else:
            h_stylekey = '00000000'
            log.info('h_hud_style: %s' % h_hud_style)

        #elif h_hud_style == 'H':
        #    h_stylekey = date_nhands_ago  needs array by player here ...

        query = 'get_stats_from_hand_aggregated'
        subs = (hand
               ,hero_id, stylekey, agg_bb_mult, agg_bb_mult, seats_min, seats_max              # villain params
               ,hero_id, h_stylekey, h_agg_bb_mult, h_agg_bb_mult, h_seats_min, h_seats_max)   # hero params

        #print "get stats: hud style =", hud_style, "query =", query, "subs =", subs
        c = self.connection.cursor()

        c.execute(self.sql.query[query], subs)
        #for row in c.fetchall():   # needs "explain query plan" in sql statement
        #    print "query plan: ", row
        colnames = [desc[0] for desc in c.description]
        for row in c.fetchall():
            playerid = row[0]
            if (playerid == hero_id and h_hud_style != 'S') or (playerid != hero_id and hud_style != 'S'):
                t_dict = {}
                for name, val in zip(colnames, row):
                    t_dict[name.lower()] = val
                stat_dict[t_dict['player_id']] = t_dict

        return stat_dict
    # uses query on handsplayers instead of hudcache to get stats on just this session
    def get_stats_from_hand_session(self, hand, stat_dict, hero_id
                                   ,hud_style, seats_min, seats_max
                                   ,h_hud_style, h_seats_min, h_seats_max):
        """Get stats for just this session (currently defined as any play in the last 24 hours - to
           be improved at some point ...)
           h_hud_style and hud_style params indicate whether to get stats for hero and/or others
           - only fetch heros stats if h_hud_style == 'S',
             and only fetch others stats if hud_style == 'S'
           seats_min/max params give seats limits, only include stats if between these values
        """

        query = self.sql.query['get_stats_from_hand_session']
        if self.db_server == 'mysql':
            query = query.replace("<signed>", 'signed ')
        else:
            query = query.replace("<signed>", '')

        subs = (self.hand_1day_ago, hand, hero_id, seats_min, seats_max
                                        , hero_id, h_seats_min, h_seats_max)
        c = self.get_cursor()

        # now get the stats
        #print "sess_stats: subs =", subs, "subs[0] =", subs[0]
        c.execute(query, subs)
        colnames = [desc[0] for desc in c.description]
        n = 0

        row = c.fetchone()
        if colnames[0].lower() == 'player_id':

            # Loop through stats adding them to appropriate stat_dict:
            while row:
                playerid = row[0]
                if (playerid == hero_id and h_hud_style == 'S') or (playerid != hero_id and hud_style == 'S'):
                    for name, val in zip(colnames, row):
                        if not playerid in stat_dict:
                            stat_dict[playerid] = {}
                            stat_dict[playerid][name.lower()] = val
                        elif not name.lower() in stat_dict[playerid]:
                            stat_dict[playerid][name.lower()] = val
                        elif name.lower() not in ('hand_id', 'player_id', 'seat', 'screen_name', 'seats'):
                            #print "DEBUG: stat_dict[%s][%s]: %s" %(playerid, name.lower(), val)
                            stat_dict[playerid][name.lower()] += val
                    n += 1
                    if n >= 10000: break  # todo: don't think this is needed so set nice and high
                                          # prevents infinite loop so leave for now - comment out or remove?
                row = c.fetchone()
        else:
            log.error(_("ERROR: query %s result does not have player_id as first column") % (query,))

        #print "   %d rows fetched, len(stat_dict) = %d" % (n, len(stat_dict))

        #print "session stat_dict =", stat_dict
    def get_player_id(self, config, siteName, playerName):
        c = self.connection.cursor()
        siteNameUtf = Charset.to_utf8(siteName)
        playerNameUtf = unicode(playerName)
        #print "db.get_player_id siteName",siteName,"playerName",playerName
        c.execute(self.sql.query['get_player_id'], (playerNameUtf, siteNameUtf))
        row = c.fetchone()
        if row:
            return row[0]
        else:
            return None
    def get_player_names(self, config, site_id=None, like_player_name="%"):
        """Fetch player names from players. Use site_id and like_player_name if provided"""
        if site_id is None:
            site_id = -1
        c = self.get_cursor()
        p_name = Charset.to_utf8(like_player_name)
        c.execute(self.sql.query['get_player_names'], (p_name, site_id, site_id))
        rows = c.fetchall()
        return rows
    def get_site_id(self, site):
        c = self.get_cursor()
        c.execute(self.sql.query['getSiteId'], (site,))
        result = c.fetchall()
        return result

    def resetPlayerIDs(self):
        self.pcache = None
    def getSqlPlayerIDs(self, pnames, siteid):
        result = {}
        if(self.pcache == None):
            self.pcache = LambdaDict(lambda key:self.insertPlayer(key[0], key[1]))

        for player in pnames:
            result[player] = self.pcache[(player,siteid)]
            # NOTE: Using the LambdaDict does the same thing as:
            #if player in self.pcache:
            #    #print "DEBUG: cachehit"
            #    pass
            #else:
            #    self.pcache[player] = self.insertPlayer(player, siteid)
            #result[player] = self.pcache[player]

        return result
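
    # LambdaDict (defined elsewhere in fpdb) is roughly a defaultdict whose
    # factory receives the missing *key*, which is what lets the cache above
    # insert a player on first lookup - approximately:
    #   class LambdaDict(dict):
    #       def __init__(self, l):
    #           super(LambdaDict, self).__init__()
    #           self.l = l
    #       def __missing__(self, key):
    #           self[key] = self.l(key)
    #           return self[key]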
    def insertPlayer(self, name, site_id):
        result = None
        _name = Charset.to_db_utf8(name)
        c = self.get_cursor()
        q = "SELECT id, name FROM Players WHERE siteid=%s and name=%s"
        q = q.replace('%s', self.sql.query['placeholder'])

        #NOTE/FIXME?: MySQL has ON DUPLICATE KEY UPDATE
        #Usage:
        #        INSERT INTO `tags` (`tag`, `count`)
        #         VALUES ($tag, 1)
        #           ON DUPLICATE KEY UPDATE `count`=`count`+1;

        #print "DEBUG: name: %s site: %s" %(name, site_id)

        c.execute (q, (site_id, _name))

        tmp = c.fetchone()
        if (tmp == None): #new player
            c.execute ("INSERT INTO Players (name, siteId) VALUES (%s, %s)".replace('%s',self.sql.query['placeholder'])
                      ,(_name, site_id))
            #Get last id might be faster here.
            #c.execute ("SELECT id FROM Players WHERE name=%s", (name,))
            result = self.get_last_insert_id(c)
        else:
            result = tmp[0]
        return result
    def get_last_insert_id(self, cursor=None):
        ret = None
        try:
            if self.backend == self.MYSQL_INNODB:
                ret = self.connection.insert_id()
                if ret < 1 or ret > 999999999:
                    log.warning(_("getLastInsertId(): problem fetching insert_id? ret=%d") % ret)
                    ret = -1
            elif self.backend == self.PGSQL:
                # some options:
                # currval(hands_id_seq) - use name of implicit seq here
                # lastval() - still needs sequences set up?
                # insert ... returning  is useful syntax (but postgres specific?)
                # see rules (fancy trigger type things)
                c = self.get_cursor()
                ret = c.execute ("SELECT lastval()")
                row = c.fetchone()
                if not row:
                    log.warning(_("getLastInsertId(%s): problem fetching lastval? row=%d") % ("lastval()", row))
                    ret = -1
                else:
                    ret = row[0]
            elif self.backend == self.SQLITE:
                ret = cursor.lastrowid
            else:
                log.error(_("getLastInsertId(): unknown backend: %d") % self.backend)
                ret = -1
        except:
            ret = -1
            err = traceback.extract_tb(sys.exc_info()[2])
            print _("*** Database get_last_insert_id error: ") + str(sys.exc_info()[1])
            print "\n".join( [e[0]+':'+str(e[1])+" "+e[2] for e in err] )
        return ret
    def prepareBulkImport(self):
        """Drop some indexes/foreign keys to prepare for bulk import.
           Currently keeping the standalone indexes as needed to import quickly"""
        stime = time()
        c = self.get_cursor()
        # sc: don't think autocommit=0 is needed, should already be in that mode
        if self.backend == self.MYSQL_INNODB:
            c.execute("SET foreign_key_checks=0")
            c.execute("SET autocommit=0")
            return
        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(0)   # allow table/index operations to work
        for fk in self.foreignKeys[self.backend]:
            if fk['drop'] == 1:
                if self.backend == self.MYSQL_INNODB:
                    c.execute("SELECT constraint_name " +
                              "FROM information_schema.KEY_COLUMN_USAGE " +
                              #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
                              "WHERE 1=1 " +
                              "AND table_name = %s AND column_name = %s " +
                              "AND referenced_table_name = %s " +
                              "AND referenced_column_name = %s ",
                              (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
                    cons = c.fetchone()
                    #print "preparebulk find fk: cons=", cons
                    if cons:
                        print _("Dropping foreign key:"), cons[0], fk['fktab'], fk['fkcol']
                        try:
                            c.execute("alter table " + fk['fktab'] + " drop foreign key " + cons[0])
                        except:
                            print _("Warning:"), _("Drop foreign key %s_%s_fkey failed: %s, continuing ...") \
                                  % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
                elif self.backend == self.PGSQL:
                    #    DON'T FORGET TO RECREATE THEM!!
                    print _("Dropping foreign key:"), fk['fktab'], fk['fkcol']
                    try:
                        # try to lock table to see if index drop will work:
                        # hmmm, tested by commenting out rollback in grapher. lock seems to work but
                        # then drop still hangs :-(  does work in some tests though??
                        # will leave code here for now pending further tests/enhancement ...
                        c.execute("BEGIN TRANSACTION")
                        c.execute( "lock table %s in exclusive mode nowait" % (fk['fktab'],) )
                        #print "after lock, status:", c.statusmessage
                        #print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
                        try:
                            c.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
                            print _("dropped foreign key %s_%s_fkey, continuing ...") % (fk['fktab'], fk['fkcol'])
                        except:
                            if "does not exist" not in str(sys.exc_value):
                                print _("Warning:"), _("Drop foreign key %s_%s_fkey failed: %s, continuing ...") \
                                      % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
                        c.execute("END TRANSACTION")
                    except:
                        print _("Warning:"), _("constraint %s_%s_fkey not dropped: %s, continuing ...") \
                              % (fk['fktab'],fk['fkcol'], str(sys.exc_value).rstrip('\n'))

        for idx in self.indexes[self.backend]:
            if idx['drop'] == 1:
                if self.backend == self.MYSQL_INNODB:
                    print _("Dropping index:"), idx['tab'], idx['col']
                    try:
                        # apparently nowait is not implemented in mysql so this just hangs if there are locks
                        # preventing the index drop :-(
                        # note: table/index names can't be bound parameters, so build the statement directly
                        c.execute( "alter table %s drop index %s;" % (idx['tab'],idx['col']) )
                    except:
                        print _("Drop index failed:"), str(sys.exc_info())
                        # ALTER TABLE `fpdb`.`handsplayers` DROP INDEX `playerId`;
                        # using: 'HandsPlayers' drop index 'playerId'
                elif self.backend == self.PGSQL:
                    #    DON'T FORGET TO RECREATE THEM!!
                    print _("Dropping index:"), idx['tab'], idx['col']
                    try:
                        # try to lock table to see if index drop will work:
                        c.execute("BEGIN TRANSACTION")
                        c.execute( "lock table %s in exclusive mode nowait" % (idx['tab'],) )
                        #print "after lock, status:", c.statusmessage
                        try:
                            # table locked ok so index drop should work:
                            #print "drop index %s_%s_idx" % (idx['tab'],idx['col'])
                            c.execute( "drop index if exists %s_%s_idx" % (idx['tab'],idx['col']) )
                            #print "dropped pg index ", idx['tab'], idx['col']
                        except:
                            if "does not exist" not in str(sys.exc_value):
                                print _("Warning:"), _("drop index %s_%s_idx failed: %s, continuing ...") \
                                      % (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n'))
                        c.execute("END TRANSACTION")
                    except:
                        print _("Warning:"), _("index %s_%s_idx not dropped: %s, continuing ...") \
                              % (idx['tab'],idx['col'], str(sys.exc_value).rstrip('\n'))

        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(1)   # go back to normal isolation level
        self.commit()   # seems to clear up errors if there were any in postgres
        ptime = time() - stime
        print (_("prepare import took %s seconds") % ptime)
    #end def prepareBulkImport
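
    # Typical bulk-import bracket (a sketch; the importer calls these around
    # a large batch of hand inserts):
    #   db.prepareBulkImport()
    #   ... insert many hands ...
    #   db.afterBulkImport()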
    def afterBulkImport(self):
        """Re-create any dropped indexes/foreign keys after bulk import"""
        stime = time()

        c = self.get_cursor()
        if self.backend == self.MYSQL_INNODB:
            c.execute("SET foreign_key_checks=1")
            c.execute("SET autocommit=1")
            return

        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(0)   # allow table/index operations to work
        for fk in self.foreignKeys[self.backend]:
            if fk['drop'] == 1:
                if self.backend == self.MYSQL_INNODB:
                    c.execute("SELECT constraint_name " +
                              "FROM information_schema.KEY_COLUMN_USAGE " +
                              #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
                              "WHERE 1=1 " +
                              "AND table_name = %s AND column_name = %s " +
                              "AND referenced_table_name = %s " +
                              "AND referenced_column_name = %s ",
                              (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
                    cons = c.fetchone()
                    #print "afterbulk: cons=", cons
                    if cons:
                        pass   # foreign key already exists
                    else:
                        print _("Creating foreign key:"), fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
                        try:
                            c.execute("alter table " + fk['fktab'] + " add foreign key ("
                                      + fk['fkcol'] + ") references " + fk['rtab'] + "("
                                      + fk['rcol'] + ")")
                        except:
                            print _("Create foreign key failed:"), str(sys.exc_info())
                elif self.backend == self.PGSQL:
                    print _("Creating foreign key:"), fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
                    try:
                        c.execute("alter table " + fk['fktab'] + " add constraint "
                                  + fk['fktab'] + '_' + fk['fkcol'] + '_fkey'
                                  + " foreign key (" + fk['fkcol']
                                  + ") references " + fk['rtab'] + "(" + fk['rcol'] + ")")
                    except:
                        print _("Create foreign key failed:"), str(sys.exc_info())

        for idx in self.indexes[self.backend]:
            if idx['drop'] == 1:
                if self.backend == self.MYSQL_INNODB:
                    print _("Creating index %s %s") % (idx['tab'], idx['col'])
                    try:
                        s = "alter table %s add index %s(%s)" % (idx['tab'],idx['col'],idx['col'])
                        c.execute(s)
                    except:
                        print _("Create index failed:"), str(sys.exc_info())
                elif self.backend == self.PGSQL:
                    # mod to use tab_col for index name?
                    print _("Creating index %s %s") % (idx['tab'], idx['col'])
                    try:
                        s = "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
                        c.execute(s)
                    except:
                        print _("Create index failed:"), str(sys.exc_info())

        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(1)   # go back to normal isolation level
        self.commit()   # seems to clear up errors if there were any in postgres
        atime = time() - stime
        print (_("After import took %s seconds") % atime)
    #end def afterBulkImport
    def drop_referential_integrity(self):
        """Update all tables to remove foreign keys"""

        c = self.get_cursor()
        c.execute(self.sql.query['list_tables'])
        result = c.fetchall()

        for i in range(len(result)):
            c.execute("SHOW CREATE TABLE " + result[i][0])
            inner = c.fetchall()

            for j in range(len(inner)):
                # result[i][0] - Table name
                # result[i][1] - CREATE TABLE parameters
                #Searching for CONSTRAINT `tablename_ibfk_1`
                for m in re.finditer('(ibfk_[0-9]+)', inner[j][1]):
                    key = "`" + inner[j][0] + "_" + m.group() + "`"
                    c.execute("ALTER TABLE " + inner[j][0] + " DROP FOREIGN KEY " + key)
                self.commit()
    #end drop_referential_integrity
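
    # Example (illustrative): MySQL names implicitly created constraints like
    #   CONSTRAINT `HandsPlayers_ibfk_1` FOREIGN KEY (`handId`) REFERENCES `Hands` (`id`)
    # in SHOW CREATE TABLE output, so the regex above recovers 'ibfk_1' and
    # rebuilds the backquoted name before issuing DROP FOREIGN KEY.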
    def recreate_tables(self):
        """(Re-)creates the tables of the current DB"""

        self.drop_tables()
        self.resetPlayerIDs()
        self.create_tables()
        self.createAllIndexes()
        self.commit()
        self.get_sites()
        #print _("Finished recreating tables")
        log.info(_("Finished recreating tables"))
    #end def recreate_tables
    def create_tables(self):
        #todo: should detect and fail gracefully if tables already exist.
        try:
            log.debug(self.sql.query['createSettingsTable'])
            c = self.get_cursor()
            c.execute(self.sql.query['createSettingsTable'])

            log.debug("Creating tables")
            c.execute(self.sql.query['createActionsTable'])
            c.execute(self.sql.query['createSitesTable'])
            c.execute(self.sql.query['createGametypesTable'])
            c.execute(self.sql.query['createFilesTable'])
            c.execute(self.sql.query['createPlayersTable'])
            c.execute(self.sql.query['createAutoratesTable'])
            c.execute(self.sql.query['createHandsTable'])
            c.execute(self.sql.query['createBoardsTable'])
            c.execute(self.sql.query['createTourneyTypesTable'])
            c.execute(self.sql.query['createTourneysTable'])
            c.execute(self.sql.query['createTourneysPlayersTable'])
            c.execute(self.sql.query['createHandsPlayersTable'])
            c.execute(self.sql.query['createHandsActionsTable'])
            c.execute(self.sql.query['createHudCacheTable'])
            c.execute(self.sql.query['createSessionsCacheTable'])
            c.execute(self.sql.query['createBackingsTable'])
            c.execute(self.sql.query['createRawHands'])
            c.execute(self.sql.query['createRawTourneys'])

            # Create sessionscache indexes
            log.debug("Creating SessionsCache indexes")
            c.execute(self.sql.query['addSessionIdIndex'])
            c.execute(self.sql.query['addHandsSessionIdIndex'])
            c.execute(self.sql.query['addHandsGameSessionIdIndex'])

            # Create unique indexes:
            log.debug("Creating unique indexes")
            c.execute(self.sql.query['addTourneyIndex'])
            c.execute(self.sql.query['addHandsIndex'])
            c.execute(self.sql.query['addPlayersIndex'])
            c.execute(self.sql.query['addTPlayersIndex'])
            c.execute(self.sql.query['addTTypesIndex'])

            self.fillDefaultData()
            self.commit()
        except:
            #print "Error creating tables: ", str(sys.exc_value)
            err = traceback.extract_tb(sys.exc_info()[2])[-1]
            print _("***Error creating tables:"), err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
            self.rollback()
            raise
    #end def create_tables
    def drop_tables(self):
        """Drops the fpdb tables from the current db"""
        try:
            c = self.get_cursor()
        except:
            print _("*** Error unable to get databasecursor")
        else:
            backend = self.get_backend_name()
            if backend == 'MySQL InnoDB': # what happens if someone is using MyISAM?
                try:
                    self.drop_referential_integrity() # needed to drop tables with foreign keys
                    c.execute(self.sql.query['list_tables'])
                    tables = c.fetchall()
                    for table in tables:
                        c.execute(self.sql.query['drop_table'] + table[0])
                except:
                    err = traceback.extract_tb(sys.exc_info()[2])[-1]
                    print _("***Error dropping tables:"), err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
            elif backend == 'PostgreSQL':
                try:
                    self.commit()
                    c.execute(self.sql.query['list_tables'])
                    tables = c.fetchall()
                    for table in tables:
                        c.execute(self.sql.query['drop_table'] + table[0] + ' cascade')
                except:
                    err = traceback.extract_tb(sys.exc_info()[2])[-1]
                    print _("***Error dropping tables:"), err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
            elif backend == 'SQLite':
                try:
                    c.execute(self.sql.query['list_tables'])
                    for table in c.fetchall():
                        log.debug(self.sql.query['drop_table'] + table[0])
                        c.execute(self.sql.query['drop_table'] + table[0])
                except:
                    err = traceback.extract_tb(sys.exc_info()[2])[-1]
                    print _("***Error dropping tables:"), err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
            try:
                self.commit()
            except:
                print _("*** Error in committing table drop")
                err = traceback.extract_tb(sys.exc_info()[2])[-1]
                print _("***Error dropping tables:"), err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
    #end def drop_tables
    def createAllIndexes(self):
        """Create new indexes"""

        try:
            if self.backend == self.PGSQL:
                self.connection.set_isolation_level(0)   # allow table/index operations to work
            for idx in self.indexes[self.backend]:
                if self.backend == self.MYSQL_INNODB:
                    print _("Creating index %s %s") %(idx['tab'], idx['col'])
                    log.debug(_("Creating index %s %s") %(idx['tab'], idx['col']))
                    try:
                        s = "create index %s on %s(%s)" % (idx['col'],idx['tab'],idx['col'])
                        self.get_cursor().execute(s)
                    except:
                        print _("Create index failed:"), str(sys.exc_info())
                elif self.backend == self.PGSQL:
                    # mod to use tab_col for index name?
                    print _("Creating index %s %s") %(idx['tab'], idx['col'])
                    log.debug(_("Creating index %s %s") %(idx['tab'], idx['col']))
                    try:
                        s = "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
                        self.get_cursor().execute(s)
                    except:
                        print _("Create index failed:"), str(sys.exc_info())
                elif self.backend == self.SQLITE:
                    print _("Creating index %s %s") %(idx['tab'], idx['col'])
                    log.debug(_("Creating index %s %s") %(idx['tab'], idx['col']))
                    try:
                        s = "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
                        self.get_cursor().execute(s)
                    except:
                        log.debug(_("Create index failed:") + " " + str(sys.exc_info()))

            if self.backend == self.PGSQL:
                self.connection.set_isolation_level(1)   # go back to normal isolation level
        except:
            print _("Error creating indexes:"), str(sys.exc_value)
            raise FpdbError("Error creating indexes:" + " " + str(sys.exc_value) )
    #end def createAllIndexes
    def dropAllIndexes(self):
        """Drop all standalone indexes (i.e. not including primary keys or foreign keys)
           using list of indexes in indexes data structure"""
        # maybe upgrade to use data dictionary?? (but take care to exclude PK and FK)
        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(0)   # allow table/index operations to work
        for idx in self.indexes[self.backend]:
            if self.backend == self.MYSQL_INNODB:
                print (_("Dropping index:"), idx['tab'], idx['col'])
                try:
                    # note: table/index names can't be bound parameters, so build the statement directly
                    self.get_cursor().execute( "alter table %s drop index %s"
                                               % (idx['tab'], idx['col']) )
                except:
                    print _("Drop index failed:"), str(sys.exc_info())
            elif self.backend == self.PGSQL:
                print (_("Dropping index:"), idx['tab'], idx['col'])
                # mod to use tab_col for index name?
                try:
                    self.get_cursor().execute( "drop index %s_%s_idx"
                                               % (idx['tab'],idx['col']) )
                except:
                    print (_("Drop index failed:"), str(sys.exc_info()))
            elif self.backend == self.SQLITE:
                print (_("Dropping index:"), idx['tab'], idx['col'])
                try:
                    self.get_cursor().execute( "drop index %s_%s_idx"
                                               % (idx['tab'],idx['col']) )
                except:
                    print _("Drop index failed:"), str(sys.exc_info())
        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(1)   # go back to normal isolation level
    #end def dropAllIndexes
    def createAllForeignKeys(self):
        """Create foreign keys"""

        try:
            if self.backend == self.PGSQL:
                self.connection.set_isolation_level(0)   # allow table/index operations to work
            c = self.get_cursor()
        except:
            print _("set_isolation_level failed:"), str(sys.exc_info())

        for fk in self.foreignKeys[self.backend]:
            if self.backend == self.MYSQL_INNODB:
                c.execute("SELECT constraint_name " +
                          "FROM information_schema.KEY_COLUMN_USAGE " +
                          #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
                          "WHERE 1=1 " +
                          "AND table_name = %s AND column_name = %s " +
                          "AND referenced_table_name = %s " +
                          "AND referenced_column_name = %s ",
                          (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
                cons = c.fetchone()
                #print "afterbulk: cons=", cons
                if cons:
                    pass   # foreign key already exists
                else:
                    print _("Creating foreign key:"), fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
                    try:
                        c.execute("alter table " + fk['fktab'] + " add foreign key ("
                                  + fk['fkcol'] + ") references " + fk['rtab'] + "("
                                  + fk['rcol'] + ")")
                    except:
                        print _("Create foreign key failed:"), str(sys.exc_info())
            elif self.backend == self.PGSQL:
                print _("Creating foreign key:"), fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
                try:
                    c.execute("alter table " + fk['fktab'] + " add constraint "
                              + fk['fktab'] + '_' + fk['fkcol'] + '_fkey'
                              + " foreign key (" + fk['fkcol']
                              + ") references " + fk['rtab'] + "(" + fk['rcol'] + ")")
                except:
                    print _("Create foreign key failed:"), str(sys.exc_info())

        try:
            if self.backend == self.PGSQL:
                self.connection.set_isolation_level(1)   # go back to normal isolation level
        except:
            print _("set_isolation_level failed:"), str(sys.exc_info())
    #end def createAllForeignKeys
    def dropAllForeignKeys(self):
        """Drop all foreign keys listed in the foreignKeys data structure"""
        # maybe upgrade to use data dictionary?? (but take care to exclude PK and FK)
        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(0)   # allow table/index operations to work
        c = self.get_cursor()

        for fk in self.foreignKeys[self.backend]:
            if self.backend == self.MYSQL_INNODB:
                c.execute("SELECT constraint_name " +
                          "FROM information_schema.KEY_COLUMN_USAGE " +
                          #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
                          "WHERE 1=1 " +
                          "AND table_name = %s AND column_name = %s " +
                          "AND referenced_table_name = %s " +
                          "AND referenced_column_name = %s ",
                          (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
                cons = c.fetchone()
                #print "preparebulk find fk: cons=", cons
                if cons:
                    print _("Dropping foreign key:"), cons[0], fk['fktab'], fk['fkcol']
                    try:
                        c.execute("alter table " + fk['fktab'] + " drop foreign key " + cons[0])
                    except:
                        print _("Warning:"), _("Drop foreign key %s_%s_fkey failed: %s, continuing ...") \
                              % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
            elif self.backend == self.PGSQL:
                #    DON'T FORGET TO RECREATE THEM!!
                print _("Dropping foreign key:"), fk['fktab'], fk['fkcol']
                try:
                    # try to lock table to see if index drop will work:
                    # hmmm, tested by commenting out rollback in grapher. lock seems to work but
                    # then drop still hangs :-(  does work in some tests though??
                    # will leave code here for now pending further tests/enhancement ...
                    c.execute("BEGIN TRANSACTION")
                    c.execute( "lock table %s in exclusive mode nowait" % (fk['fktab'],) )
                    #print "after lock, status:", c.statusmessage
                    #print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
                    try:
                        c.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
                        print _("dropped foreign key %s_%s_fkey, continuing ...") % (fk['fktab'], fk['fkcol'])
                    except:
                        if "does not exist" not in str(sys.exc_value):
                            print _("Warning:"), _("Drop foreign key %s_%s_fkey failed: %s, continuing ...") \
                                  % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
                    c.execute("END TRANSACTION")
                except:
                    print _("Warning:"), _("constraint %s_%s_fkey not dropped: %s, continuing ...") \
                          % (fk['fktab'],fk['fkcol'], str(sys.exc_value).rstrip('\n'))
            #print _("Only MySQL and Postgres supported so far")

        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(1)   # go back to normal isolation level
    #end def dropAllForeignKeys
    def fillDefaultData(self):
        c = self.get_cursor()
        c.execute("INSERT INTO Settings (version) VALUES (%s);" % (DB_VERSION))

        c.execute("INSERT INTO Sites (id,name,code) VALUES ('1', 'Full Tilt Poker', 'FT')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('2', 'PokerStars', 'PS')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('3', 'Everleaf', 'EV')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('4', 'Win2day', 'W2')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('5', 'OnGame', 'OG')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('6', 'UltimateBet', 'UB')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('7', 'Betfair', 'BF')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('8', 'Absolute', 'AB')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('9', 'PartyPoker', 'PP')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('10', 'PacificPoker', 'P8')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('11', 'Partouche', 'PA')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('12', 'Carbon', 'CA')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('13', 'PKR', 'PK')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('14', 'iPoker', 'IP')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('15', 'Winamax', 'WM')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('16', 'Everest', 'EP')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('17', 'Cake', 'CK')")

        c.execute("INSERT INTO Actions (id,name,code) VALUES ('1', 'ante', 'A')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('2', 'small blind', 'SB')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('3', 'secondsb', 'SSB')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('4', 'big blind', 'BB')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('5', 'both', 'SBBB')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('6', 'calls', 'C')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('7', 'raises', 'R')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('8', 'bets', 'B')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('9', 'stands pat', 'S')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('10', 'folds', 'F')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('11', 'checks', 'K')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('12', 'discards', 'D')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('13', 'bringin', 'I')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('14', 'completes', 'P')")
    #end def fillDefaultData
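    # The Sites and Actions rows above behave as fixed enumerations: importers
    # store the small integer ids, and reports join back for the names. An
    # illustrative lookup (hand id 1234 is made up) might read:
    #
    #   SELECT ha.actionNo, a.name, a.code
    #   FROM HandsActions ha JOIN Actions a ON (a.id = ha.actionId)
    #   WHERE ha.handId = 1234 ORDER BY ha.actionNo;
    #
    # returning rows such as (1, 'small blind', 'SB'), (2, 'big blind', 'BB').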
    def rebuild_indexes(self, start=None):
        self.dropAllIndexes()
        self.createAllIndexes()
        self.dropAllForeignKeys()
        self.createAllForeignKeys()
    #end def rebuild_indexes
    def rebuild_hudcache(self, h_start=None, v_start=None):
        """clears hudcache and rebuilds from the individual handsplayers records"""
        try:
            stime = time()
            # derive list of program owner's player ids
            self.hero = {}                               # name of program owner indexed by site id
            self.hero_ids = {'dummy':-53, 'dummy2':-52}  # playerid of owner indexed by site id
                                                         # make sure at least two values in dict
                                                         # so that tuple generation doesn't create
                                                         # a () or (value,) style tuple
            for site in self.config.get_supported_sites():
                result = self.get_site_id(site)
                if result:
                    site_id = result[0][0]
                    self.hero[site_id] = self.config.supported_sites[site].screen_name
                    p_id = self.get_player_id(self.config, site, self.hero[site_id])
                    if p_id:
                        self.hero_ids[site_id] = int(p_id)

            if h_start is None:
                h_start = self.hero_hudstart_def
            if v_start is None:
                v_start = self.villain_hudstart_def

            if self.hero_ids == {}:
                where = "WHERE hp.tourneysPlayersId IS NULL"
            else:
                where =   "where ((( hp.playerId not in " + str(tuple(self.hero_ids.values())) \
                        + "   and h.startTime > '" + v_start + "')" \
                        + "   or ( hp.playerId in " + str(tuple(self.hero_ids.values())) \
                        + "   and h.startTime > '" + h_start + "'))" \
                        + "   AND hp.tourneysPlayersId IS NULL)"
            rebuild_sql_cash = self.sql.query['rebuildHudCache'].replace('<tourney_insert_clause>', "")
            rebuild_sql_cash = rebuild_sql_cash.replace('<tourney_select_clause>', "")
            rebuild_sql_cash = rebuild_sql_cash.replace('<tourney_join_clause>', "")
            rebuild_sql_cash = rebuild_sql_cash.replace('<tourney_group_clause>', "")
            rebuild_sql_cash = rebuild_sql_cash.replace('<where_clause>', where)
            #print "rebuild_sql_cash:",rebuild_sql_cash
            self.get_cursor().execute(self.sql.query['clearHudCache'])
            self.get_cursor().execute(rebuild_sql_cash)

            if self.hero_ids == {}:
                where = "WHERE hp.tourneysPlayersId >= 0"
            else:
                where =   "where ((( hp.playerId not in " + str(tuple(self.hero_ids.values())) \
                        + "   and h.startTime > '" + v_start + "')" \
                        + "   or ( hp.playerId in " + str(tuple(self.hero_ids.values())) \
                        + "   and h.startTime > '" + h_start + "'))" \
                        + "   AND hp.tourneysPlayersId >= 0)"
            rebuild_sql_tourney = self.sql.query['rebuildHudCache'].replace('<tourney_insert_clause>', ",tourneyTypeId")
            rebuild_sql_tourney = rebuild_sql_tourney.replace('<tourney_select_clause>', ",t.tourneyTypeId")
            rebuild_sql_tourney = rebuild_sql_tourney.replace('<tourney_join_clause>', """INNER JOIN TourneysPlayers tp ON (tp.id = hp.tourneysPlayersId)
                INNER JOIN Tourneys t ON (t.id = tp.tourneyId)""")
            rebuild_sql_tourney = rebuild_sql_tourney.replace('<tourney_group_clause>', ",t.tourneyTypeId")
            rebuild_sql_tourney = rebuild_sql_tourney.replace('<where_clause>', where)
            #print "rebuild_sql_tourney:",rebuild_sql_tourney
            self.get_cursor().execute(rebuild_sql_tourney)

            self.commit()
            print _("Rebuild hudcache took %.1f seconds") % (time() - stime,)
        except:
            err = traceback.extract_tb(sys.exc_info()[2])[-1]
            print _("Error rebuilding hudcache:"), str(sys.exc_value)
            print err
    #end def rebuild_hudcache
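    # For illustration, with hero ids {2: 101, 3: 102} (plus the two dummy
    # entries) and the default start dates, the cash where-clause built above
    # expands to roughly:
    #   where ((( hp.playerId not in (-53, -52, 101, 102)
    #             and h.startTime > '1999-12-31')
    #          or ( hp.playerId in (-53, -52, 101, 102)
    #             and h.startTime > '1999-12-31'))
    #          AND hp.tourneysPlayersId IS NULL)
    # i.e. villain stats and hero stats can be rebuilt from different dates.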
    def rebuild_sessionscache(self):
        """clears sessionscache and rebuilds from the individual records"""

        heros = []
        for site in self.config.get_supported_sites():
            result = self.get_site_id(site)
            if result:
                site_id = result[0][0]
                hero = self.config.supported_sites[site].screen_name
                p_id = self.get_player_id(self.config, site, hero)
                if p_id:
                    heros.append(int(p_id))

        rebuildSessionsCache    = self.sql.query['rebuildSessionsCache']
        rebuildSessionsCacheSum = self.sql.query['rebuildSessionsCacheSum']

        if len(heros) == 0:
            where         = '0'
            where_summary = '0'
        elif len(heros) > 0:
            where         = str(heros[0])
            where_summary = str(heros[0])
            for i in heros[1:]:
                where         = where + ' OR HandsPlayers.playerId = %s' % str(i)
                where_summary = where_summary + ' OR TourneysPlayers.playerId = %s' % str(i)
        rebuildSessionsCache    = rebuildSessionsCache.replace('<where_clause>', where)
        rebuildSessionsCacheSum = rebuildSessionsCacheSum.replace('<where_clause>', where_summary)

        c = self.get_cursor()
        c.execute(self.sql.query['clearSessionsCache'])
        self.commit()

        sc, gsc = {'bk': []}, {'bk': []}
        c.execute(rebuildSessionsCache)
        tmp = c.fetchone()
        while tmp:
            pids, game, pdata = {}, {}, {}
            id                              = tmp[0]
            startTime                       = tmp[1]
            pids['pname']                   = tmp[2]
            gid                             = tmp[3]
            game['type']                    = tmp[4]
            pdata['pname']                  = {}
            pdata['pname']['totalProfit']   = tmp[5]
            pdata['pname']['tourneyTypeId'] = tmp[6]
            pdata['pname']['street0VPI']    = tmp[7]
            pdata['pname']['street1Seen']   = tmp[8]
            tmp = c.fetchone()
            sc  = self.prepSessionsCache (id, pids, startTime, sc, heros, tmp == None)
            gsc = self.storeSessionsCache(id, pids, startTime, game, gid, pdata, sc, gsc, None, heros, tmp == None)

        for i, id in sc.iteritems():
            if i != 'bk':
                sid = id['id']
                gid = gsc[i]['id']
                c.execute("UPDATE Hands SET sessionId = %s, gameSessionId = %s WHERE id = %s", (sid, gid, i))
        self.commit()

        sc, gsc = {'bk': []}, {'bk': []}
        c.execute(rebuildSessionsCacheSum)
        tmp = c.fetchone()
        while tmp:
            pids, game, info = {}, {}, {}
            id                                = tmp[0]
            startTime                         = tmp[1]
            pids['pname']                     = tmp[2]
            game['type']                      = 'summary'
            info['tourneyTypeId']             = tmp[3]
            info['winnings']                  = {}
            info['winnings']['pname']         = tmp[4]
            info['winningsCurrency']          = {}
            info['winningsCurrency']['pname'] = tmp[5]
            info['buyinCurrency']             = tmp[6]
            info['buyin']                     = tmp[7]
            info['fee']                       = tmp[8]
            tmp = c.fetchone()
            sc  = self.prepSessionsCache (id, pids, startTime, sc, heros, tmp == None)
            gsc = self.storeSessionsCache(id, pids, startTime, game, None, info, sc, gsc, None, heros, tmp == None)
        self.commit()
    #end def rebuild_sessionscache
    def get_hero_hudcache_start(self):
        """fetches earliest stylekey from hudcache for one of hero's player ids"""
        try:
            # derive list of program owner's player ids
            self.hero = {}                               # name of program owner indexed by site id
            self.hero_ids = {'dummy':-53, 'dummy2':-52}  # playerid of owner indexed by site id
                                                         # make sure at least two values in dict
                                                         # so that tuple generation doesn't create
                                                         # a () or (value,) style tuple
            for site in self.config.get_supported_sites():
                result = self.get_site_id(site)
                if result:
                    site_id = result[0][0]
                    self.hero[site_id] = self.config.supported_sites[site].screen_name
                    p_id = self.get_player_id(self.config, site, self.hero[site_id])
                    if p_id:
                        self.hero_ids[site_id] = int(p_id)

            q = self.sql.query['get_hero_hudcache_start'].replace("<playerid_list>", str(tuple(self.hero_ids.values())))
            c = self.get_cursor()
            c.execute(q)
            tmp = c.fetchone()
            if tmp == (None,):
                return self.hero_hudstart_def
            else:
                return "20"+tmp[0][1:3] + "-" + tmp[0][3:5] + "-" + tmp[0][5:7]
        except:
            err = traceback.extract_tb(sys.exc_info()[2])[-1]
            print _("Error rebuilding hudcache:"), str(sys.exc_value)
            print err
    #end def get_hero_hudcache_start
    def analyzeDB(self):
        """Do whatever the DB can offer to update index/table statistics"""
        stime = time()
        if self.backend == self.MYSQL_INNODB:
            try:
                self.get_cursor().execute(self.sql.query['analyze'])
            except:
                print _("Error during analyze:"), str(sys.exc_value)
        elif self.backend == self.PGSQL:
            self.connection.set_isolation_level(0)   # allow analyze to work
            try:
                self.get_cursor().execute(self.sql.query['analyze'])
            except:
                print _("Error during analyze:"), str(sys.exc_value)
            self.connection.set_isolation_level(1)   # go back to normal isolation level
        self.commit()
        atime = time() - stime
        log.info(_("Analyze took %.1f seconds") % (atime,))
    #end def analyzeDB
1797 """Do whatever the DB can offer to update index/table statistics"""
1799 if self
.backend
== self
.MYSQL_INNODB
:
1801 self
.get_cursor().execute(self
.sql
.query
['vacuum'])
1803 print _("Error during vacuum:"), str(sys
.exc_value
)
1804 elif self
.backend
== self
.PGSQL
:
1805 self
.connection
.set_isolation_level(0) # allow vacuum to work
1807 self
.get_cursor().execute(self
.sql
.query
['vacuum'])
1809 print _("Error during vacuum:"), str(sys
.exc_value
)
1810 self
.connection
.set_isolation_level(1) # go back to normal isolation level
1812 atime
= time() - stime
1813 print _("Vacuum took %.1f seconds") % (atime
,)
    # Start of Hand Writing routines. Idea is to provide a mixture of routines to store Hand data
    # however the calling prog requires. Main aims:
    # - existing static routines from fpdb_simple just modified
    def setThreadId(self, threadid):
        self.threadId = threadid

    def acquireLock(self, wait=True, retry_time=.01):
        while not self._has_lock:
            cursor = self.get_cursor()
            cursor.execute(self.sql.query['selectLock'])
            record = cursor.fetchall()
            self.commit()
            if not len(record):
                cursor.execute(self.sql.query['switchLock'], (True, self.threadId))
                self.commit()
                self._has_lock = True
                return True
            else:
                cursor.execute(self.sql.query['missedLock'], (1, self.threadId))
                self.commit()
                if not wait:
                    return False
                sleep(retry_time)

    def releaseLock(self):
        if self._has_lock:
            cursor = self.get_cursor()
            num = cursor.execute(self.sql.query['switchLock'], (False, self.threadId))
            self.commit()
            self._has_lock = False

    def lock_for_insert(self):
        """Lock tables in MySQL to try to speed inserts up"""
        try:
            self.get_cursor().execute(self.sql.query['lockForInsert'])
        except:
            print _("Error during lock_for_insert:"), str(sys.exc_value)
    #end def lock_for_insert
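    # Illustrative usage of the lock pair above (the real call sites are in
    # the importer, not shown here): acquireLock() spins on selectLock /
    # switchLock until this threadId owns the row, and releaseLock() hands
    # it back.
    #
    #   if db.acquireLock(wait=True):
    #       try:
    #           ... bulk inserts ...
    #           db.commit()
    #       finally:
    #           db.releaseLock()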
    ###########################
    # NEWIMPORT CODE
    ###########################
    def storeHand(self, hdata, hbulk, doinsert = False, printdata = False):
        if printdata:
            print _("######## Hands ##########")
            import pprint
            pp = pprint.PrettyPrinter(indent=4)
            pp.pprint(hdata)
            print _("###### End Hands ########")

        # Tablename can have odd characters
        hdata['tableName'] = Charset.to_db_utf8(hdata['tableName'])

        hbulk.append( [ hdata['tableName'],
                        hdata['siteHandNo'],
                        hdata['tourneyId'],
                        hdata['gametypeId'],
                        hdata['sessionId'],
                        hdata['gameSessionId'],
                        hdata['fileId'],
                        hdata['startTime'],
                        datetime.utcnow(),   #importtime
                        hdata['seats'],
                        hdata['texture'],
                        hdata['playersVpi'],
                        hdata['boardcard1'],
                        hdata['boardcard2'],
                        hdata['boardcard3'],
                        hdata['boardcard4'],
                        hdata['boardcard5'],
                        hdata['runItTwice'],
                        hdata['playersAtStreet1'],
                        hdata['playersAtStreet2'],
                        hdata['playersAtStreet3'],
                        hdata['playersAtStreet4'],
                        hdata['playersAtShowdown'],
                        hdata['street0Raises'],
                        hdata['street1Raises'],
                        hdata['street2Raises'],
                        hdata['street3Raises'],
                        hdata['street4Raises'],
                        hdata['street1Pot'],
                        hdata['street2Pot'],
                        hdata['street3Pot'],
                        hdata['street4Pot'],
                        hdata['showdownPot'],
                        hdata['boards'],
                        hdata['id']
                    ])

        if doinsert:
            bbulk = []
            for h in hbulk:
                id = h[-1]
                if hdata['sc'] and hdata['gsc']:
                    h[4] = hdata['sc'][id]['id']
                    h[5] = hdata['gsc'][id]['id']
                for b in h[-2]:
                    bbulk += [[id] + b]
                del h[-2:]   # drop the scratch boards/id entries before the insert
            q = self.sql.query['store_hand']
            q = q.replace('%s', self.sql.query['placeholder'])
            c = self.get_cursor()
            c.executemany(q, hbulk)
            q = self.sql.query['store_boards']
            q = q.replace('%s', self.sql.query['placeholder'])
            c = self.get_cursor()
            c.executemany(q, bbulk)
            self.commit()
        return hbulk
    #end def storeHand
    def storeHandsPlayers(self, hid, pids, pdata, hpbulk, doinsert = False, printdata = False):
        #print "DEBUG: %s %s %s" %(hid, pids, pdata)
        if printdata:
            import pprint
            pp = pprint.PrettyPrinter(indent=4)
            pp.pprint(pdata)

        for p in pdata:
            hpbulk.append( ( hid,
                             pids[p],
                             pdata[p]['startCash'],
                             pdata[p]['seatNo'],
                             pdata[p]['sitout'],
                             pdata[p]['card1'],
                             pdata[p]['card2'],
                             pdata[p]['card3'],
                             pdata[p]['card4'],
                             pdata[p]['card5'],
                             pdata[p]['card6'],
                             pdata[p]['card7'],
                             pdata[p]['card8'],
                             pdata[p]['card9'],
                             pdata[p]['card10'],
                             pdata[p]['card11'],
                             pdata[p]['card12'],
                             pdata[p]['card13'],
                             pdata[p]['card14'],
                             pdata[p]['card15'],
                             pdata[p]['card16'],
                             pdata[p]['card17'],
                             pdata[p]['card18'],
                             pdata[p]['card19'],
                             pdata[p]['card20'],
                             pdata[p]['winnings'],
                             pdata[p]['rake'],
                             pdata[p]['totalProfit'],
                             pdata[p]['street0VPI'],
                             pdata[p]['street1Seen'],
                             pdata[p]['street2Seen'],
                             pdata[p]['street3Seen'],
                             pdata[p]['street4Seen'],
                             pdata[p]['sawShowdown'],
                             pdata[p]['wonAtSD'],
                             pdata[p]['street0Aggr'],
                             pdata[p]['street1Aggr'],
                             pdata[p]['street2Aggr'],
                             pdata[p]['street3Aggr'],
                             pdata[p]['street4Aggr'],
                             pdata[p]['street1CBChance'],
                             pdata[p]['street2CBChance'],
                             pdata[p]['street3CBChance'],
                             pdata[p]['street4CBChance'],
                             pdata[p]['street1CBDone'],
                             pdata[p]['street2CBDone'],
                             pdata[p]['street3CBDone'],
                             pdata[p]['street4CBDone'],
                             pdata[p]['wonWhenSeenStreet1'],
                             pdata[p]['wonWhenSeenStreet2'],
                             pdata[p]['wonWhenSeenStreet3'],
                             pdata[p]['wonWhenSeenStreet4'],
                             pdata[p]['street0Calls'],
                             pdata[p]['street1Calls'],
                             pdata[p]['street2Calls'],
                             pdata[p]['street3Calls'],
                             pdata[p]['street4Calls'],
                             pdata[p]['street0Bets'],
                             pdata[p]['street1Bets'],
                             pdata[p]['street2Bets'],
                             pdata[p]['street3Bets'],
                             pdata[p]['street4Bets'],
                             pdata[p]['position'],
                             pdata[p]['tourneysPlayersIds'],
                             pdata[p]['startCards'],
                             pdata[p]['street0_3BChance'],
                             pdata[p]['street0_3BDone'],
                             pdata[p]['street0_4BChance'],
                             pdata[p]['street0_4BDone'],
                             pdata[p]['street0_C4BChance'],
                             pdata[p]['street0_C4BDone'],
                             pdata[p]['street0_FoldTo3BChance'],
                             pdata[p]['street0_FoldTo3BDone'],
                             pdata[p]['street0_FoldTo4BChance'],
                             pdata[p]['street0_FoldTo4BDone'],
                             pdata[p]['street0_SqueezeChance'],
                             pdata[p]['street0_SqueezeDone'],
                             pdata[p]['raiseToStealChance'],
                             pdata[p]['raiseToStealDone'],
                             pdata[p]['success_Steal'],
                             pdata[p]['otherRaisedStreet0'],
                             pdata[p]['otherRaisedStreet1'],
                             pdata[p]['otherRaisedStreet2'],
                             pdata[p]['otherRaisedStreet3'],
                             pdata[p]['otherRaisedStreet4'],
                             pdata[p]['foldToOtherRaisedStreet0'],
                             pdata[p]['foldToOtherRaisedStreet1'],
                             pdata[p]['foldToOtherRaisedStreet2'],
                             pdata[p]['foldToOtherRaisedStreet3'],
                             pdata[p]['foldToOtherRaisedStreet4'],
                             pdata[p]['raiseFirstInChance'],
                             pdata[p]['raisedFirstIn'],
                             pdata[p]['foldBbToStealChance'],
                             pdata[p]['foldedBbToSteal'],
                             pdata[p]['foldSbToStealChance'],
                             pdata[p]['foldedSbToSteal'],
                             pdata[p]['foldToStreet1CBChance'],
                             pdata[p]['foldToStreet1CBDone'],
                             pdata[p]['foldToStreet2CBChance'],
                             pdata[p]['foldToStreet2CBDone'],
                             pdata[p]['foldToStreet3CBChance'],
                             pdata[p]['foldToStreet3CBDone'],
                             pdata[p]['foldToStreet4CBChance'],
                             pdata[p]['foldToStreet4CBDone'],
                             pdata[p]['street1CheckCallRaiseChance'],
                             pdata[p]['street1CheckCallRaiseDone'],
                             pdata[p]['street2CheckCallRaiseChance'],
                             pdata[p]['street2CheckCallRaiseDone'],
                             pdata[p]['street3CheckCallRaiseChance'],
                             pdata[p]['street3CheckCallRaiseDone'],
                             pdata[p]['street4CheckCallRaiseChance'],
                             pdata[p]['street4CheckCallRaiseDone'],
                             pdata[p]['street0Raises'],
                             pdata[p]['street1Raises'],
                             pdata[p]['street2Raises'],
                             pdata[p]['street3Raises'],
                             pdata[p]['street4Raises']
                           ) )

        if doinsert:
            q = self.sql.query['store_hands_players']
            q = q.replace('%s', self.sql.query['placeholder'])
            c = self.get_cursor()
            c.executemany(q, hpbulk)
        return hpbulk
    #end def storeHandsPlayers
    def storeHandsActions(self, hid, pids, adata, habulk, doinsert = False, printdata = False):
        #print "DEBUG: %s %s %s" %(hid, pids, adata)

        # This can be used to generate test data. Currently unused
        #if printdata:
        #    import pprint
        #    pp = pprint.PrettyPrinter(indent=4)
        #    pp.pprint(adata)

        for a in adata:
            habulk.append( (hid,
                            pids[adata[a]['player']],
                            adata[a]['street'],
                            adata[a]['actionNo'],
                            adata[a]['streetActionNo'],
                            adata[a]['actionId'],
                            adata[a]['amount'],
                            adata[a]['raiseTo'],
                            adata[a]['amountCalled'],
                            adata[a]['numDiscarded'],
                            adata[a]['cardsDiscarded'],
                            adata[a]['allIn']
                           ) )
        if doinsert:
            q = self.sql.query['store_hands_actions']
            q = q.replace('%s', self.sql.query['placeholder'])
            c = self.get_cursor()
            c.executemany(q, habulk)
        return habulk
    #end def storeHandsActions
    def storeHudCache(self, gid, pids, starttime, pdata, hcbulk, doinsert = False):
        """Update cached statistics. If update fails because no record exists, do an insert."""

        tz = datetime.utcnow() - datetime.today()
        tz_offset = tz.seconds/3600
        tz_day_start_offset = self.day_start + tz_offset

        d = timedelta(hours=tz_day_start_offset)
        starttime_offset = starttime - d

        if self.use_date_in_hudcache:
            styleKey = datetime.strftime(starttime_offset, 'd%y%m%d')
            #styleKey = "d%02d%02d%02d" % (hand_start_time.year-2000, hand_start_time.month, hand_start_time.day)
        else:
            # hard-code styleKey as 'A000000' (all-time cache, no key) for now
            styleKey = 'A000000'

        update_hudcache = self.sql.query['update_hudcache']
        update_hudcache = update_hudcache.replace('%s', self.sql.query['placeholder'])
        insert_hudcache = self.sql.query['insert_hudcache']
        insert_hudcache = insert_hudcache.replace('%s', self.sql.query['placeholder'])

        #print "DEBUG: %s %s %s" %(hid, pids, pdata)
        for p in pdata:
            line = []

            #NOTE: Insert new stats at right place because SQL needs strict order
            line.append(1)  # HDs
            line.append(pdata[p]['street0VPI'])
            line.append(pdata[p]['street0Aggr'])
            line.append(pdata[p]['street0_3BChance'])
            line.append(pdata[p]['street0_3BDone'])
            line.append(pdata[p]['street0_4BChance'])
            line.append(pdata[p]['street0_4BDone'])
            line.append(pdata[p]['street0_C4BChance'])
            line.append(pdata[p]['street0_C4BDone'])
            line.append(pdata[p]['street0_FoldTo3BChance'])
            line.append(pdata[p]['street0_FoldTo3BDone'])
            line.append(pdata[p]['street0_FoldTo4BChance'])
            line.append(pdata[p]['street0_FoldTo4BDone'])
            line.append(pdata[p]['street0_SqueezeChance'])
            line.append(pdata[p]['street0_SqueezeDone'])
            line.append(pdata[p]['raiseToStealChance'])
            line.append(pdata[p]['raiseToStealDone'])
            line.append(pdata[p]['success_Steal'])
            line.append(pdata[p]['street1Seen'])
            line.append(pdata[p]['street2Seen'])
            line.append(pdata[p]['street3Seen'])
            line.append(pdata[p]['street4Seen'])
            line.append(pdata[p]['sawShowdown'])
            line.append(pdata[p]['street1Aggr'])
            line.append(pdata[p]['street2Aggr'])
            line.append(pdata[p]['street3Aggr'])
            line.append(pdata[p]['street4Aggr'])
            line.append(pdata[p]['otherRaisedStreet0'])
            line.append(pdata[p]['otherRaisedStreet1'])
            line.append(pdata[p]['otherRaisedStreet2'])
            line.append(pdata[p]['otherRaisedStreet3'])
            line.append(pdata[p]['otherRaisedStreet4'])
            line.append(pdata[p]['foldToOtherRaisedStreet0'])
            line.append(pdata[p]['foldToOtherRaisedStreet1'])
            line.append(pdata[p]['foldToOtherRaisedStreet2'])
            line.append(pdata[p]['foldToOtherRaisedStreet3'])
            line.append(pdata[p]['foldToOtherRaisedStreet4'])
            line.append(pdata[p]['wonWhenSeenStreet1'])
            line.append(pdata[p]['wonWhenSeenStreet2'])
            line.append(pdata[p]['wonWhenSeenStreet3'])
            line.append(pdata[p]['wonWhenSeenStreet4'])
            line.append(pdata[p]['wonAtSD'])
            line.append(pdata[p]['raiseFirstInChance'])
            line.append(pdata[p]['raisedFirstIn'])
            line.append(pdata[p]['foldBbToStealChance'])
            line.append(pdata[p]['foldedBbToSteal'])
            line.append(pdata[p]['foldSbToStealChance'])
            line.append(pdata[p]['foldedSbToSteal'])
            line.append(pdata[p]['street1CBChance'])
            line.append(pdata[p]['street1CBDone'])
            line.append(pdata[p]['street2CBChance'])
            line.append(pdata[p]['street2CBDone'])
            line.append(pdata[p]['street3CBChance'])
            line.append(pdata[p]['street3CBDone'])
            line.append(pdata[p]['street4CBChance'])
            line.append(pdata[p]['street4CBDone'])
            line.append(pdata[p]['foldToStreet1CBChance'])
            line.append(pdata[p]['foldToStreet1CBDone'])
            line.append(pdata[p]['foldToStreet2CBChance'])
            line.append(pdata[p]['foldToStreet2CBDone'])
            line.append(pdata[p]['foldToStreet3CBChance'])
            line.append(pdata[p]['foldToStreet3CBDone'])
            line.append(pdata[p]['foldToStreet4CBChance'])
            line.append(pdata[p]['foldToStreet4CBDone'])
            line.append(pdata[p]['totalProfit'])
            line.append(pdata[p]['street1CheckCallRaiseChance'])
            line.append(pdata[p]['street1CheckCallRaiseDone'])
            line.append(pdata[p]['street2CheckCallRaiseChance'])
            line.append(pdata[p]['street2CheckCallRaiseDone'])
            line.append(pdata[p]['street3CheckCallRaiseChance'])
            line.append(pdata[p]['street3CheckCallRaiseDone'])
            line.append(pdata[p]['street4CheckCallRaiseChance'])
            line.append(pdata[p]['street4CheckCallRaiseDone'])
            line.append(pdata[p]['street0Calls'])
            line.append(pdata[p]['street1Calls'])
            line.append(pdata[p]['street2Calls'])
            line.append(pdata[p]['street3Calls'])
            line.append(pdata[p]['street4Calls'])
            line.append(pdata[p]['street0Bets'])
            line.append(pdata[p]['street1Bets'])
            line.append(pdata[p]['street2Bets'])
            line.append(pdata[p]['street3Bets'])
            line.append(pdata[p]['street4Bets'])
            line.append(pdata[p]['street0Raises'])
            line.append(pdata[p]['street1Raises'])
            line.append(pdata[p]['street2Raises'])
            line.append(pdata[p]['street3Raises'])
            line.append(pdata[p]['street4Raises'])

            hc = {}
            hc['gametypeId'] = gid
            hc['playerId'] = pids[p]
            hc['activeSeats'] = len(pids)
            pos = {'B':'B', 'S':'S', 0:'D', 1:'C', 2:'M', 3:'M', 4:'M', 5:'E', 6:'E', 7:'E', 8:'E', 9:'E' }
            hc['position'] = pos[pdata[p]['position']]
            hc['tourneyTypeId'] = pdata[p]['tourneyTypeId']
            hc['styleKey'] = styleKey
            for i in range(len(line)):
                if line[i]==True:  line[i] = 1
                if line[i]==False: line[i] = 0
            hc['line'] = line
            hc['game'] = [hc['gametypeId']
                         ,hc['playerId']
                         ,hc['activeSeats']
                         ,hc['position']
                         ,hc['tourneyTypeId']
                         ,hc['styleKey']]

            match = False
            for b in hcbulk:
                #print hc['game']==b['game'], hc['game'], b['game']
                if hc['game']==b['game']:
                    b['line'] = [sum(l) for l in zip(b['line'], hc['line'])]
                    match = True
            if not match: hcbulk.append(hc)

        if doinsert:
            inserts = []
            c = self.get_cursor()
            for hc in hcbulk:
                row = hc['line'] + hc['game']
                num = c.execute(update_hudcache, row)
                # Try to do the update first; do an insert if the update touched no row
                if ((self.backend == self.PGSQL and c.statusmessage != "UPDATE 1")
                        or (self.backend == self.MYSQL_INNODB and num == 0)
                        or (self.backend == self.SQLITE and num.rowcount == 0)):
                    inserts.append(hc['game'] + hc['line'])
                    #row = hc['game'] + hc['line']
                    #num = c.execute(insert_hudcache, row)
                    #print "DEBUG: Successfully(?: %s) updated HudCache using INSERT" % num
                #else:
                #    print "DEBUG: Successfully updated HudCache using UPDATE"
            if inserts:
                c.executemany(insert_hudcache, inserts)
            self.commit()
    #end def storeHudCache
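    # Worked example of the styleKey arithmetic above (illustrative numbers):
    # with self.day_start = 5 and a machine clock 3 hours behind UTC,
    # tz_offset = 3 and tz_day_start_offset = 8. A hand with a UTC starttime
    # of 2011-05-23 06:00 then has starttime_offset = 2011-05-22 22:00 and is
    # cached under styleKey 'd110522' - it still belongs to the previous
    # "poker day", which only rolls over at 05:00 local time.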
    def prepSessionsCache(self, hid, pids, startTime, sc, heros, doinsert = False):
        """Update cached sessions. If no record exists, do an insert"""
        THRESHOLD = timedelta(seconds=int(self.sessionTimeout * 60))

        select_prepSC    = self.sql.query['select_prepSC'].replace('%s', self.sql.query['placeholder'])
        update_Hands_sid = self.sql.query['update_Hands_sid'].replace('%s', self.sql.query['placeholder'])
        update_SC_sid    = self.sql.query['update_SC_sid'].replace('%s', self.sql.query['placeholder'])
        update_prepSC    = self.sql.query['update_prepSC'].replace('%s', self.sql.query['placeholder'])

        #print "DEBUG: %s %s %s" %(hid, pids, pdata)
        hand = {}
        for p, id in pids.iteritems():
            if id in heros:
                hand['startTime'] = startTime.replace(tzinfo=None)

        if hand:
            id = []
            lower = hand['startTime']-THRESHOLD
            upper = hand['startTime']+THRESHOLD
            for i in range(len(sc['bk'])):
                if ((lower <= sc['bk'][i]['sessionEnd'])
                and (upper >= sc['bk'][i]['sessionStart'])):
                    if ((hand['startTime'] <= sc['bk'][i]['sessionEnd'])
                    and (hand['startTime'] >= sc['bk'][i]['sessionStart'])):
                        id.append(i)
                    elif hand['startTime'] < sc['bk'][i]['sessionStart']:
                        sc['bk'][i]['sessionStart'] = hand['startTime']
                        id.append(i)
                    elif hand['startTime'] > sc['bk'][i]['sessionEnd']:
                        sc['bk'][i]['sessionEnd'] = hand['startTime']
                        id.append(i)
            if len(id) == 1:
                id = id[0]
                sc['bk'][id]['ids'].append(hid)
            elif len(id) == 2:
                if sc['bk'][id[0]]['startTime'] < sc['bk'][id[1]]['startTime']:
                    sc['bk'][id[0]]['endTime'] = sc['bk'][id[1]]['endTime']
                else:
                    sc['bk'][id[0]]['startTime'] = sc['bk'][id[1]]['startTime']
                sc['bk'].pop(id[1])   # pop is a method call, not a subscript
                id = id[0]
                sc['bk'][id]['ids'].append(hid)
            elif len(id) == 0:
                hand['ids'] = []
                hand['sessionStart'] = hand['startTime']
                hand['sessionEnd']   = hand['startTime']
                hand['ids'].append(hid)
                sc['bk'].append(hand)

        if doinsert:
            c = self.get_cursor()
            c.execute("SELECT max(sessionId) FROM SessionsCache")
            id = c.fetchone()[0]
            if not id: id = 0
            sid = id
            for i in range(len(sc['bk'])):
                lower = sc['bk'][i]['sessionStart'] - THRESHOLD
                upper = sc['bk'][i]['sessionEnd']   + THRESHOLD
                c.execute(select_prepSC, (lower, upper))
                r = self.fetchallDict(c)
                num = len(r)
                if (num == 1):
                    start, end, update = r[0]['sessionStart'], r[0]['sessionEnd'], False
                    if sc['bk'][i]['sessionStart'] < start:
                        start, update = sc['bk'][i]['sessionStart'], True
                    if sc['bk'][i]['sessionEnd'] > end:
                        end, update = sc['bk'][i]['sessionEnd'], True
                    if update:
                        c.execute(update_prepSC, [start, end, r[0]['id']])
                    for h in sc['bk'][i]['ids']:
                        sc[h] = {'id': r[0]['id'], 'data': [start, end]}
                elif (num > 1):
                    sid += 1
                    start, end, merge = None, None, []
                    r.append(sc['bk'][i])
                    for n in r:
                        if start:
                            if start > n['sessionStart']:
                                start = n['sessionStart']
                        else: start = n['sessionStart']
                        if end:
                            if end < n['sessionEnd']:
                                end = n['sessionEnd']
                        else: end = n['sessionEnd']
                    for n in r:
                        if 'id' not in n or n['id'] in merge: continue  # the in-memory entry has no DB id yet
                        merge.append(n['id'])
                        c.execute(update_Hands_sid, (sid, n['id']))
                        c.execute(update_SC_sid, (start, end, sid, n['id']))
                    for k, v in sc.iteritems():
                        if k != 'bk' and v['id'] in merge:
                            v['id'], v['data'] = sid, [start, end]
                    for h in sc['bk'][i]['ids']:
                        sc[h] = {'id': sid, 'data': [start, end]}
                elif (num == 0):
                    sid += 1
                    start = sc['bk'][i]['sessionStart']
                    end   = sc['bk'][i]['sessionEnd']
                    for h in sc['bk'][i]['ids']:
                        sc[h] = {'id': sid, 'data': [start, end]}
        return sc
    #end def prepSessionsCache
    def storeSessionsCache(self, hid, pids, startTime, game, gid, pdata, sc, gsc, tz, heros, doinsert = False):
        """Update cached sessions. If no record exists, do an insert"""
        if not tz:
            tz_dt = datetime.utcnow() - datetime.today()
            tz = tz_dt.seconds/3600

        THRESHOLD = timedelta(seconds=int(self.sessionTimeout * 60))
        local = startTime + timedelta(hours=int(tz))
        date = "d%02d%02d%02d" % (local.year - 2000, local.month, local.day)

        select_SC         = self.sql.query['select_SC'].replace('%s', self.sql.query['placeholder'])
        update_SC         = self.sql.query['update_SC'].replace('%s', self.sql.query['placeholder'])
        insert_SC         = self.sql.query['insert_SC'].replace('%s', self.sql.query['placeholder'])
        delete_SC         = self.sql.query['delete_SC'].replace('%s', self.sql.query['placeholder'])
        update_Hands_gsid = self.sql.query['update_Hands_gsid'].replace('%s', self.sql.query['placeholder'])

        #print "DEBUG: %s %s %s" %(hid, pids, pdata)
        hand = {}
        for p, id in pids.iteritems():
            if id in heros:
                hand['hands'] = 0
                hand['totalProfit'] = 0
                hand['playerId'] = id
                hand['gametypeId'] = None
                hand['date'] = date
                hand['startTime'] = startTime.replace(tzinfo=None)
                hand['hid'] = hid
                hand['tourneys'] = 0
                hand['tourneyTypeId'] = None
                hand['played'] = 0
                hand['ids'] = []
                if (game['type']=='summary'):
                    hand['type'] = 'tour'
                    hand['tourneys'] = 1
                    hand['tourneyTypeId'] = pdata['tourneyTypeId']
                    hand['played'] = 1
                    if pdata['buyinCurrency'] == pdata['winningsCurrency'][p]:
                        hand['totalProfit'] = pdata['winnings'][p] - (pdata['buyin'] + pdata['fee'])
                    else: hand['totalProfit'] = pdata['winnings'][p]
                elif (game['type']=='ring'):
                    hand['type'] = game['type']
                    hand['hands'] = 1
                    hand['gametypeId'] = gid
                    if pdata[p]['street0VPI'] or pdata[p]['street1Seen']:
                        hand['played'] = 1
                    hand['totalProfit'] = pdata[p]['totalProfit']
                elif (game['type']=='tour'):
                    hand['type'] = game['type']
                    hand['hands'] = 1
                    hand['tourneyTypeId'] = pdata[p]['tourneyTypeId']
                    if pdata[p]['street0VPI'] or pdata[p]['street1Seen']:
                        hand['played'] = 1

        if hand:
            id = []
            lower = hand['startTime']-THRESHOLD
            upper = hand['startTime']+THRESHOLD
            for i in range(len(gsc['bk'])):
                if ((hand['date']          == gsc['bk'][i]['date'])
                and (hand['gametypeId']    == gsc['bk'][i]['gametypeId'])
                and (hand['playerId']      == gsc['bk'][i]['playerId'])
                and (hand['tourneyTypeId'] == gsc['bk'][i]['tourneyTypeId'])
                and (hand['played']        == gsc['bk'][i]['played'])):
                    if ((lower <= gsc['bk'][i]['gameEnd'])
                    and (upper >= gsc['bk'][i]['gameStart'])):
                        if ((hand['startTime'] <= gsc['bk'][i]['gameEnd'])
                        and (hand['startTime'] >= gsc['bk'][i]['gameStart'])):
                            gsc['bk'][i]['hands']       += hand['hands']
                            gsc['bk'][i]['tourneys']    += hand['tourneys']
                            gsc['bk'][i]['totalProfit'] += hand['totalProfit']
                        elif hand['startTime'] < gsc['bk'][i]['gameStart']:
                            gsc['bk'][i]['hands']       += hand['hands']
                            gsc['bk'][i]['tourneys']    += hand['tourneys']
                            gsc['bk'][i]['totalProfit'] += hand['totalProfit']
                            gsc['bk'][i]['gameStart']    = hand['startTime']
                        elif hand['startTime'] > gsc['bk'][i]['gameEnd']:
                            gsc['bk'][i]['hands']       += hand['hands']
                            gsc['bk'][i]['tourneys']    += hand['tourneys']
                            gsc['bk'][i]['totalProfit'] += hand['totalProfit']
                            gsc['bk'][i]['gameEnd']      = hand['startTime']
                        id.append(i)
            if len(id) == 1:
                gsc['bk'][id[0]]['ids'].append(hid)
            elif len(id) == 2:
                if gsc['bk'][id[0]]['gameStart'] < gsc['bk'][id[1]]['gameStart']:
                    gsc['bk'][id[0]]['gameEnd'] = gsc['bk'][id[1]]['gameEnd']
                else: gsc['bk'][id[0]]['gameStart'] = gsc['bk'][id[1]]['gameStart']
                gsc['bk'][id[0]]['hands']       += hand['hands']
                gsc['bk'][id[0]]['tourneys']    += hand['tourneys']
                gsc['bk'][id[0]]['totalProfit'] += hand['totalProfit']
                gsc['bk'].pop(id[1])   # was gsc['bk'].pop[id[1]] - pop is a method, not a mapping
                gsc['bk'][id[0]]['ids'].append(hid)
            elif len(id) == 0:
                hand['gameStart'] = hand['startTime']
                hand['gameEnd']   = hand['startTime']
                hand['ids'].append(hid)
                gsc['bk'].append(hand)

        if doinsert:
            c = self.get_cursor()
            for i in range(len(gsc['bk'])):
                hid = gsc['bk'][i]['hid']
                sid, start, end = sc[hid]['id'], sc[hid]['data'][0], sc[hid]['data'][1]
                lower = gsc['bk'][i]['gameStart'] - THRESHOLD
                upper = gsc['bk'][i]['gameEnd']   + THRESHOLD
                game = [gsc['bk'][i]['date']
                       ,gsc['bk'][i]['type']
                       ,gsc['bk'][i]['gametypeId']
                       ,gsc['bk'][i]['tourneyTypeId']
                       ,gsc['bk'][i]['playerId']
                       ,gsc['bk'][i]['played']]
                row = [lower, upper] + game
                c.execute(select_SC, row)
                r = self.fetchallDict(c)
                num = len(r)
                if (num == 1):
                    gstart, gend = r[0]['gameStart'], r[0]['gameEnd']
                    if gsc['bk'][i]['gameStart'] < gstart:
                        gstart = gsc['bk'][i]['gameStart']
                    if gsc['bk'][i]['gameEnd'] > gend:
                        gend = gsc['bk'][i]['gameEnd']
                    row = [start, end, gstart, gend
                          ,gsc['bk'][i]['hands']
                          ,gsc['bk'][i]['tourneys']
                          ,gsc['bk'][i]['totalProfit']
                          ,r[0]['id']]
                    c.execute(update_SC, row)
                    for h in gsc['bk'][i]['ids']: gsc[h] = {'id': r[0]['id']}
                elif (num > 1):
                    gstart, gend, hands, tourneys, totalProfit, delete = None, None, 0, 0, 0, []
                    for n in r: delete.append(n['id'])
                    for d in delete: c.execute(delete_SC, d)
                    r.append(gsc['bk'][i])
                    for n in r:
                        if gstart:
                            if gstart > n['gameStart']:
                                gstart = n['gameStart']
                        else: gstart = n['gameStart']
                        if gend:
                            if gend < n['gameEnd']:
                                gend = n['gameEnd']
                        else: gend = n['gameEnd']
                        hands       += n['hands']
                        tourneys    += n['tourneys']
                        totalProfit += n['totalProfit']
                    row = [start, end, gstart, gend, sid] + game + [hands, tourneys, totalProfit]
                    c.execute(insert_SC, row)
                    gsid = self.get_last_insert_id(c)
                    for h in gsc['bk'][i]['ids']: gsc[h] = {'id': gsid}
                    for m in delete:
                        c.execute(update_Hands_gsid, (gsid, m))
                elif (num == 0):
                    gstart      = gsc['bk'][i]['gameStart']
                    gend        = gsc['bk'][i]['gameEnd']
                    hands       = gsc['bk'][i]['hands']
                    tourneys    = gsc['bk'][i]['tourneys']
                    totalProfit = gsc['bk'][i]['totalProfit']
                    row = [start, end, gstart, gend, sid] + game + [hands, tourneys, totalProfit]
                    c.execute(insert_SC, row)
                    gsid = self.get_last_insert_id(c)
                    for h in gsc['bk'][i]['ids']: gsc[h] = {'id': gsid}
                else:
                    # Something bad happened
                    pass
        return gsc
    #end def storeSessionsCache
    def getSqlGameTypeId(self, siteid, game, printdata = False):
        if(self.gtcache == None):
            self.gtcache = LambdaDict(lambda key:self.insertGameTypes(key[0], key[1]))

        self.gtprintdata = printdata
        hilo = "h"
        if game['category'] in ['studhilo', 'omahahilo']:
            hilo = "s"
        elif game['category'] in ['razz','27_3draw','badugi', '27_1draw']:
            hilo = "l"

        gtinfo = (siteid, game['type'], game['category'], game['limitType'], game['currency'],
                  game['mix'], int(Decimal(game['sb'])*100), int(Decimal(game['bb'])*100),
                  game['maxSeats'], game['ante'])

        gtinsert = (siteid, game['currency'], game['type'], game['base'], game['category'], game['limitType'], hilo,
                    game['mix'], int(Decimal(game['sb'])*100), int(Decimal(game['bb'])*100),
                    int(Decimal(game['bb'])*100), int(Decimal(game['bb'])*200), game['maxSeats'], game['ante'])

        result = self.gtcache[(gtinfo, gtinsert)]
        # NOTE: Using the LambdaDict does the same thing as:
        #if player in self.pcache:
        #    #print "DEBUG: cachehit"
        #    pass
        #else:
        #    self.pcache[player] = self.insertPlayer(player, siteid)
        #result[player] = self.pcache[player]

        return result
    #end def getSqlGameTypeId
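    # Illustrative cache key for a $1/$2 no-limit holdem ring game on site 2
    # (the exact mix/ante values depend on what the parser supplies):
    #   gtinfo = (2, 'ring', 'holdem', 'nl', 'USD', 'none', 100, 200, 9, 0)
    # The first lookup of this tuple falls through to insertGameTypes(); every
    # later hand with the same gametype is served from the LambdaDict cache.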
    def insertGameTypes(self, gtinfo, gtinsert):
        result = None
        c = self.get_cursor()
        q = self.sql.query['getGametypeNL']
        q = q.replace('%s', self.sql.query['placeholder'])
        c.execute(q, gtinfo)
        tmp = c.fetchone()
        if (tmp == None):
            if self.gtprintdata:
                print _("######## Gametype ##########")
                import pprint
                pp = pprint.PrettyPrinter(indent=4)
                pp.pprint(gtinsert)
                print _("###### End Gametype ########")
            c.execute(self.sql.query['insertGameTypes'], gtinsert)
            result = self.get_last_insert_id(c)
        else:
            result = tmp[0]
        return result
    #end def insertGameTypes
    def storeFile(self, fdata):
        q = self.sql.query['store_file']
        q = q.replace('%s', self.sql.query['placeholder'])
        c = self.get_cursor()
        c.execute(q, fdata)
        id = self.get_last_insert_id(c)
        return id

    def updateFile(self, fdata):
        q = self.sql.query['update_file']
        q = q.replace('%s', self.sql.query['placeholder'])
        c = self.get_cursor()
        c.execute(q, fdata)
    def getHeroIds(self, pids, sitename):
        #Grab playerIds using hero names in HUD_Config.xml
        try:
            # derive list of program owner's player ids
            hero = {}        # name of program owner indexed by site id
            hero_ids = []
                             # make sure at least two values in list
                             # so that tuple generation doesn't create
                             # a () or (value,) style tuple
            for site in self.config.get_supported_sites():
                hero = self.config.supported_sites[site].screen_name
                for n, v in pids.iteritems():
                    if n == hero and sitename == site:
                        hero_ids.append(v)
        except:
            err = traceback.extract_tb(sys.exc_info()[2])[-1]
            #print _("Error acquiring hero ids:"), str(sys.exc_value)
        return hero_ids
    #end def getHeroIds
    def fetchallDict(self, cursor):
        data = cursor.fetchall()
        if not data: return []
        desc = cursor.description
        results = [0]*len(data)
        for i in range(len(data)):
            results[i] = {}
            for n in range(len(desc)):
                name = desc[n][0]
                results[i][name] = data[i][n]
        return results
    #end def fetchallDict
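    # Example (illustrative values): for a cursor whose description names the
    # columns ('id', 'sessionStart') and which fetched the single row
    # (42, '2011-05-23 20:15:00'), fetchallDict returns
    # [{'id': 42, 'sessionStart': '2011-05-23 20:15:00'}].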
    def nextHandId(self):
        c = self.get_cursor()
        c.execute("SELECT max(id) FROM Hands")
        id = c.fetchone()[0]
        if not id: id = 0
        id += 1
        return id
    #end def nextHandId
    def isDuplicate(self, gametypeID, siteHandNo):
        dup = False
        c = self.get_cursor()
        c.execute(self.sql.query['isAlreadyInDB'], (gametypeID, siteHandNo))
        result = c.fetchall()
        if len(result) > 0:
            dup = True
        return dup
    #end def isDuplicate
    #################################
    # Finish of NEWIMPORT CODE
    #################################
    # read HandToWrite objects from q and insert into database
    def insert_queue_hands(self, q, maxwait=10, commitEachHand=True):
        n, fails, maxTries, firstWait = 0, 0, 4, 0.1
        t0 = time()
        finished = False
        while not finished:
            try:
                h = q.get(True)  # (True,maxWait) has probs if 1st part of import is all dups
            except Queue.Empty:
                # Queue.Empty exception thrown if q was empty for
                # if q.empty() also possible - no point if testing for Queue.Empty exception
                # maybe increment a counter and only break after a few times?
                # could also test threading.active_count() or look through threading.enumerate()
                # so break immediately if no threads, but count up to X exceptions if a writer
                # thread is still alive???
                print _("queue empty too long - writer stopping ...")
                break
            except:
                print _("writer stopping, error reading queue:"), str(sys.exc_info())
                break
            #print "got hand", str(h.get_finished())

            tries, wait, again = 0, firstWait, True
            while again:
                try:
                    again = False  # set this immediately to avoid infinite loops!
                    if h.get_finished():
                        # all items on queue processed
                        finished = True
                    else:
                        self.store_the_hand(h)
                        # optional commit, could be every hand / every N hands / every time a
                        # commit message received?? mark flag to indicate if commits outstanding
                        if commitEachHand:
                            self.commit()
                        n = n + 1
                except:
                    #print "iqh store error", sys.exc_value # debug
                    self.rollback()
                    if re.search('deadlock', str(sys.exc_info()[1]), re.I):
                        # deadlocks only a problem if hudcache is being updated
                        tries = tries + 1
                        if tries < maxTries and wait < 5:    # wait < 5 just to make sure
                            print _("deadlock detected - trying again ...")
                            sleep(wait)
                            wait = wait + wait
                            again = True
                        else:
                            print _("Too many deadlocks - failed to store hand"), h.get_siteHandNo()
                    if not again:
                        fails = fails + 1
                        err = traceback.extract_tb(sys.exc_info()[2])[-1]
                        print _("***Error storing hand:"), err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
            # finished trying to store hand

            # always reduce q count, whether or not this hand was saved ok
            q.task_done()

        self.commit()
        print _("db writer finished: stored %d hands (%d fails) in %.1f seconds") % (n, fails, time()-t0)
    # end def insert_queue_hands():
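    # Illustrative producer/consumer wiring for the writer above (the real
    # call sites are in the import code): a parser thread fills a Queue.Queue
    # with HandToWrite objects, send_finish_msg() below enqueues the sentinel,
    # and insert_queue_hands() drains the queue until it sees it.
    #
    #   import Queue
    #   q = Queue.Queue()
    #   ... parser: q.put(hand_to_write) for each parsed hand ...
    #   db.send_finish_msg(q)       # marks the end of the import
    #   db.insert_queue_hands(q)    # runs in the writer thread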
    def send_finish_msg(self, q):
        try:
            h = HandToWrite(True)
            q.put(h)
        except:
            err = traceback.extract_tb(sys.exc_info()[2])[-1]
            print _("***Error sending finish:"), err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
    # end def send_finish_msg():
    def getSqlTourneyTypeIDs(self, hand):
        if(self.ttcache == None):
            self.ttcache = LambdaDict(lambda key:self.insertTourneyType(key[0], key[1], key[2], key[3]))

        tourneydata = (hand.siteId, hand.buyinCurrency, hand.buyin, hand.fee, hand.gametype['category'],
                       hand.gametype['limitType'], hand.maxseats, hand.isKO,
                       hand.isRebuy, hand.isAddOn, hand.speed, hand.isShootout, hand.isMatrix)

        tourneyInsert = (hand.siteId, hand.buyinCurrency, hand.buyin, hand.fee, hand.gametype['category'],
                         hand.gametype['limitType'], hand.maxseats,
                         hand.buyInChips, hand.isKO, hand.koBounty, hand.isRebuy,
                         hand.isAddOn, hand.speed, hand.isShootout, hand.isMatrix, hand.added, hand.addedCurrency)

        result = self.ttcache[(hand.tourNo, hand.siteId, tourneydata, tourneyInsert)]
        # NOTE: Using the LambdaDict does the same thing as:
        #if player in self.pcache:
        #    #print "DEBUG: cachehit"
        #    pass
        #else:
        #    self.pcache[player] = self.insertPlayer(player, siteid)
        #result[player] = self.pcache[player]

        return result
    #end def getSqlTourneyTypeIDs
    def insertTourneyType(self, tournNo, siteId, tourneydata, tourneyInsert):
        result = None
        c = self.get_cursor()
        q = self.sql.query['getTourneyTypeIdByTourneyNo']
        q = q.replace('%s', self.sql.query['placeholder'])

        #NOTE/FIXME?: MySQL has ON DUPLICATE KEY UPDATE
        #Usage:
        #        INSERT INTO `tags` (`tag`, `count`)
        #        VALUES ($tag, 1)
        #        ON DUPLICATE KEY UPDATE `count`=`count`+1;

        #print "DEBUG: name: %s site: %s" %(name, site_id)

        c.execute(q, (tournNo, siteId))

        tmp = c.fetchone()
        if (tmp == None):   #new tourney type
            c.execute(self.sql.query['getTourneyTypeId'].replace('%s', self.sql.query['placeholder']), tourneydata)
            try:
                result = c.fetchone()[0]
            except TypeError:   #this means we need to create a new entry
                c.execute(self.sql.query['insertTourneyType'].replace('%s', self.sql.query['placeholder']), tourneyInsert)
                #Get last id might be faster here.
                #c.execute ("SELECT id FROM Players WHERE name=%s", (name,))
                result = self.get_last_insert_id(c)
        else:
            result = tmp[0]
        return result
    #end def insertTourneyType
    def createOrUpdateTourney(self, summary):
        cursor = self.get_cursor()
        q = self.sql.query['getTourneyByTourneyNo'].replace('%s', self.sql.query['placeholder'])
        cursor.execute(q, (summary.siteId, summary.tourNo))

        columnNames = [desc[0] for desc in cursor.description]
        result = cursor.fetchone()

        if result != None:
            expectedValues = ('comment', 'tourneyName', 'matrixIdProcessed', 'totalRebuyCount', 'totalAddOnCount',
                              'prizepool', 'startTime', 'entries', 'commentTs', 'endTime')
            updateDb = False
            resultDict = dict(zip(columnNames, result))

            tourneyId = resultDict["id"]
            for ev in expectedValues:
                if getattr(summary, ev)==None and resultDict[ev]!=None:    #DB has this value but object doesn't, so update object
                    setattr(summary, ev, resultDict[ev])
                elif getattr(summary, ev)!=None and resultDict[ev]==None:  #object has this value but DB doesn't, so update DB
                    updateDb = True
                #elif ev=="startTime":
                #    if (resultDict[ev] < summary.startTime):
                #        summary.startTime=resultDict[ev]
            if updateDb:
                q = self.sql.query['updateTourney'].replace('%s', self.sql.query['placeholder'])
                row = (summary.entries, summary.prizepool, summary.startTime, summary.endTime, summary.tourneyName,
                       summary.matrixIdProcessed, summary.totalRebuyCount, summary.totalAddOnCount, summary.comment,
                       summary.commentTs, tourneyId
                      )
                cursor.execute(q, row)
        else:
            cursor.execute(self.sql.query['insertTourney'].replace('%s', self.sql.query['placeholder']),
                           (summary.tourneyTypeId, summary.tourNo, summary.entries, summary.prizepool, summary.startTime,
                            summary.endTime, summary.tourneyName, summary.matrixIdProcessed, summary.totalRebuyCount, summary.totalAddOnCount))
            tourneyId = self.get_last_insert_id(cursor)
        return tourneyId
    #end def createOrUpdateTourney
    def getSqlTourneyIDs(self, hand):
        if(self.tcache == None):
            self.tcache = LambdaDict(lambda key:self.insertTourney(key[0], key[1], key[2], key[3]))

        result = self.tcache[(hand.siteId, hand.tourNo, hand.tourneyTypeId, hand.startTime)]
        # NOTE: Using the LambdaDict does the same thing as:
        #if player in self.pcache:
        #    #print "DEBUG: cachehit"
        #    pass
        #else:
        #    self.pcache[player] = self.insertPlayer(player, siteid)
        #result[player] = self.pcache[player]

        return result
    #end def getSqlTourneyIDs
    def insertTourney(self, siteId, tourNo, tourneyTypeId, startTime):
        result = None
        c = self.get_cursor()
        q = self.sql.query['getTourneyByTourneyNo']
        q = q.replace('%s', self.sql.query['placeholder'])

        #NOTE/FIXME?: MySQL has ON DUPLICATE KEY UPDATE
        #Usage:
        #        INSERT INTO `tags` (`tag`, `count`)
        #        VALUES ($tag, 1)
        #        ON DUPLICATE KEY UPDATE `count`=`count`+1;

        #print "DEBUG: name: %s site: %s" %(name, site_id)

        c.execute(q, (siteId, tourNo))

        tmp = c.fetchone()
        if (tmp == None):   #new tourney
            c.execute(self.sql.query['insertTourney'].replace('%s', self.sql.query['placeholder']),
                      (tourneyTypeId, tourNo, None, None,
                       startTime, None, None, None, None, None))
            #Get last id might be faster here.
            #c.execute ("SELECT id FROM Players WHERE name=%s", (name,))
            result = self.get_last_insert_id(c)
        else:
            result = tmp[0]
        return result
    #end def insertTourney
    def createOrUpdateTourneysPlayers(self, summary):
        tourneysPlayersIds = {}
        for player in summary.players:
            playerId = summary.dbid_pids[player]
            cursor = self.get_cursor()
            cursor.execute(self.sql.query['getTourneysPlayersByIds'].replace('%s', self.sql.query['placeholder']),
                           (summary.tourneyId, playerId))
            columnNames = [desc[0] for desc in cursor.description]
            result = cursor.fetchone()

            if result != None:
                expectedValues = ('rank', 'winnings', 'winningsCurrency', 'rebuyCount', 'addOnCount', 'koCount')
                updateDb = False
                resultDict = dict(zip(columnNames, result))
                tourneysPlayersIds[player[1]] = result[0]
                for ev in expectedValues:
                    summaryAttribute = ev
                    if ev != "winnings" and ev != "winningsCurrency":
                        summaryAttribute += "s"
                    if getattr(summary, summaryAttribute)[player]==None and resultDict[ev]!=None:    #DB has this value but object doesn't, so update object
                        setattr(summary, summaryAttribute, resultDict[ev][player])
                    elif getattr(summary, summaryAttribute)[player]!=None and resultDict[ev]==None:  #object has this value but DB doesn't, so update DB
                        updateDb = True
                if updateDb:
                    q = self.sql.query['updateTourneysPlayer'].replace('%s', self.sql.query['placeholder'])
                    inputs = (summary.ranks[player],
                              summary.winnings[player],
                              summary.winningsCurrency[player],
                              summary.rebuyCounts[player],
                              summary.addOnCounts[player],
                              summary.koCounts[player],
                              tourneysPlayersIds[player[1]]
                             )
                    #pp = pprint.PrettyPrinter(indent=4)
                    #pp.pprint(inputs)
                    cursor.execute(q, inputs)
            else:
                #print "all values: tourneyId",summary.tourneyId, "playerId",playerId, "rank",summary.ranks[player], "winnings",summary.winnings[player], "winCurr",summary.winningsCurrency[player], summary.rebuyCounts[player], summary.addOnCounts[player], summary.koCounts[player]
                if summary.ranks[player]:
                    cursor.execute(self.sql.query['insertTourneysPlayer'].replace('%s', self.sql.query['placeholder']),
                                   (summary.tourneyId, playerId, int(summary.ranks[player]), int(summary.winnings[player]), summary.winningsCurrency[player],
                                    summary.rebuyCounts[player], summary.addOnCounts[player], summary.koCounts[player]))
                else:
                    cursor.execute(self.sql.query['insertTourneysPlayer'].replace('%s', self.sql.query['placeholder']),
                                   (summary.tourneyId, playerId, None, None, None,
                                    summary.rebuyCounts[player], summary.addOnCounts[player], summary.koCounts[player]))
                tourneysPlayersIds[player[1]] = self.get_last_insert_id(cursor)
        return tourneysPlayersIds
    #end def createOrUpdateTourneysPlayers
    def getSqlTourneysPlayersIDs(self, hand):
        result = {}
        if(self.tpcache == None):
            self.tpcache = LambdaDict(lambda key:self.insertTourneysPlayers(key[0], key[1]))

        for player in hand.players:
            playerId = hand.dbid_pids[player[1]]
            result[player[1]] = self.tpcache[(playerId, hand.tourneyId)]
        # NOTE: Using the LambdaDict does the same thing as:
        #if player in self.pcache:
        #    #print "DEBUG: cachehit"
        #    pass
        #else:
        #    self.pcache[player] = self.insertPlayer(player, siteid)
        #result[player] = self.pcache[player]

        return result
    #end def getSqlTourneysPlayersIDs
    def insertTourneysPlayers(self, playerId, tourneyId):
        result = None
        c = self.get_cursor()
        q = self.sql.query['getTourneysPlayersByIds']
        q = q.replace('%s', self.sql.query['placeholder'])

        #NOTE/FIXME?: MySQL has ON DUPLICATE KEY UPDATE
        #Usage:
        #        INSERT INTO `tags` (`tag`, `count`)
        #        VALUES ($tag, 1)
        #        ON DUPLICATE KEY UPDATE `count`=`count`+1;

        #print "DEBUG: name: %s site: %s" %(name, site_id)

        c.execute(q, (tourneyId, playerId))

        tmp = c.fetchone()
        if (tmp == None):   #new tourneys player
            c.execute(self.sql.query['insertTourneysPlayer'].replace('%s',self.sql.query['placeholder'])
                      ,(tourneyId, playerId, None, None, None, None, None, None))
            #Get last id might be faster here.
            #c.execute ("SELECT id FROM Players WHERE name=%s", (name,))
            result = self.get_last_insert_id(c)
        else:
            result = tmp[0]
        return result
    #end def insertTourneysPlayers
    def getTourneyTypesIds(self):
        c = self.connection.cursor()
        c.execute(self.sql.query['getTourneyTypesIds'])
        result = c.fetchall()
        return result
    #end def getTourneyTypesIds
    def getTourneyInfo(self, siteName, tourneyNo):
        c = self.get_cursor()
        c.execute(self.sql.query['getTourneyInfo'], (siteName, tourneyNo))
        columnNames = c.description

        names = []
        for column in columnNames:
            names.append(column[0])

        data = c.fetchone()
        return (names, data)
    #end def getTourneyInfo
    def getTourneyPlayerInfo(self, siteName, tourneyNo, playerName):
        c = self.get_cursor()
        c.execute(self.sql.query['getTourneyPlayerInfo'], (siteName, tourneyNo, playerName))
        columnNames = c.description

        names = []
        for column in columnNames:
            names.append(column[0])

        data = c.fetchone()
        return (names, data)
    #end def getTourneyPlayerInfo
# Class used to hold all the data needed to write a hand to the db
# mainParser() in fpdb_parse_logic.py creates one of these and then passes it to
# self.insert_queue_hands()

class HandToWrite:

    def __init__(self, finished = False):  # db_name and game not used any more
        try:
            self.finished = finished
            self.config = None
            self.settings = None
            self.base = None
            self.category = None
            self.siteTourneyNo = None
            self.buyin = None
            self.fee = None
            self.knockout = None
            self.entries = None
            self.prizepool = None
            self.tourneyStartTime = None
            self.isTourney = None
            self.tourneyTypeId = None
            self.siteID = None
            self.siteHandNo = None
            self.gametypeID = None
            self.handStartTime = None
            self.names = None
            self.playerIDs = None
            self.startCashes = None
            self.positions = None
            self.antes = None
            self.cardValues = None
            self.cardSuits = None
            self.boardValues = None
            self.boardSuits = None
            self.winnings = None
            self.rakes = None
            self.actionTypes = None
            self.allIns = None
            self.actionAmounts = None
            self.actionNos = None
            self.hudImportData = None
            self.maxSeats = None
            self.tableName = None
            self.seatNos = None
        except:
            print _("%s error: %s") % ("HandToWrite.init", str(sys.exc_info()))
            raise
    # end def __init__

    def set_all( self, config, settings, base, category, siteTourneyNo, buyin
               , fee, knockout, entries, prizepool, tourneyStartTime
               , isTourney, tourneyTypeId, siteID, siteHandNo
               , gametypeID, handStartTime, names, playerIDs, startCashes
               , positions, antes, cardValues, cardSuits, boardValues, boardSuits
               , winnings, rakes, actionTypes, allIns, actionAmounts
               , actionNos, hudImportData, maxSeats, tableName, seatNos):

        try:
            self.config = config
            self.settings = settings
            self.base = base
            self.category = category
            self.siteTourneyNo = siteTourneyNo
            self.buyin = buyin
            self.fee = fee
            self.knockout = knockout
            self.entries = entries
            self.prizepool = prizepool
            self.tourneyStartTime = tourneyStartTime
            self.isTourney = isTourney
            self.tourneyTypeId = tourneyTypeId
            self.siteID = siteID
            self.siteHandNo = siteHandNo
            self.gametypeID = gametypeID
            self.handStartTime = handStartTime
            self.names = names
            self.playerIDs = playerIDs
            self.startCashes = startCashes
            self.positions = positions
            self.antes = antes
            self.cardValues = cardValues
            self.cardSuits = cardSuits
            self.boardValues = boardValues
            self.boardSuits = boardSuits
            self.winnings = winnings
            self.rakes = rakes
            self.actionTypes = actionTypes
            self.allIns = allIns
            self.actionAmounts = actionAmounts
            self.actionNos = actionNos
            self.hudImportData = hudImportData
            self.maxSeats = maxSeats
            self.tableName = tableName
            self.seatNos = seatNos
        except:
            print _("%s error: %s") % ("HandToWrite.set_all", str(sys.exc_info()))
            raise
    # end def set_all

    def get_finished(self):
        return( self.finished )
    # end def get_finished

    def get_siteHandNo(self):
        return( self.siteHandNo )
    # end def get_siteHandNo
if __name__=="__main__":
    c = Configuration.Config()
    sql = SQL.Sql(db_server = 'sqlite')

    db_connection = Database(c) # mysql fpdb holdem
#    db_connection = Database(c, 'fpdb-p', 'test') # mysql fpdb holdem
#    db_connection = Database(c, 'PTrackSv2', 'razz') # mysql razz
#    db_connection = Database(c, 'ptracks', 'razz') # postgres
    print "database connection object = ", db_connection.connection
    # db_connection.recreate_tables()
    db_connection.dropAllIndexes()
    db_connection.createAllIndexes()

    h = db_connection.get_last_hand()
    print "last hand = ", h

    hero = db_connection.get_player_id(c, 'PokerStars', 'nutOmatic')
    if hero:
        print "nutOmatic player_id", hero

    # example of displaying query plan in sqlite:
    if db_connection.backend == 4:
        print "\nExplain query plan in sqlite:"
        c = db_connection.get_cursor()
        c.execute('explain query plan '+sql.query['get_table_name'], (h, ))
        for row in c.fetchall():
            print "Query plan:", row
        print

    t0 = time()
    stat_dict = db_connection.get_stats_from_hand(h, "ring")
    t1 = time()
    for p in stat_dict.keys():
        print p, "  ", stat_dict[p]

    print _("cards ="), db_connection.get_cards(u'1')
    db_connection.close_connection()   # was missing its parens, so the call never happened

    print _("get_stats took: %4.3f seconds") % (t1-t0)

    print _("Press ENTER to continue.")
    sys.stdin.readline()
#Code borrowed from http://push.cx/2008/caching-dictionaries-in-python-vs-ruby
class LambdaDict(dict):
    def __init__(self, l):
        super(LambdaDict, self).__init__()
        self.l = l

    def __getitem__(self, key):
        if self.has_key(key):
            return self.get(key)
        else:
            self.__setitem__(key, self.l(key))
            return self.get(key)
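
# Minimal illustrative sketch of LambdaDict in use (not called by fpdb itself):
# the supplied lambda runs once per unseen key and the result is memoised,
# which is exactly how the gtcache/ttcache/tcache/tpcache attributes above
# avoid re-querying the database for repeated gametype/tourney keys.
def _lambda_dict_example():
    squares = LambdaDict(lambda key: key * key)
    first  = squares[4]   # miss: the lambda computes 16 and stores it
    second = squares[4]   # hit: served straight from the dict
    return first, second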