# -*- coding: utf-8 -*-
"""Create and manage the database objects."""
#    Copyright 2008-2011, Ray E. Barker
#
#    This program is free software; you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation; either version 2 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License
#    along with this program; if not, write to the Free Software
#    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

import L10n
_ = L10n.get_translation()
########################################################################

# TODO:  - rebuild indexes / vacuum option
#        - check speed of get_stats_from_hand() - add log info
#        - check size of db, seems big? (mysql)
#        - investigate size of mysql db (200K for just 7K hands? 2GB for 140K hands?)

# postmaster -D /var/lib/pgsql/data
# Standard Library modules
import os
import sys
import re
import traceback
import math
import logging
from datetime import datetime, date, time, timedelta
from time import time, strftime, sleep
from decimal_wrapper import Decimal
#   logging has been set up in fpdb.py or HUD_main.py, use their settings:
log = logging.getLogger("db")
# FreePokerTools modules
import SQL
import Charset
from Exceptions import *
# Other library modules
try:
    import sqlalchemy.pool as pool
    use_pool = True
except ImportError:
    log.info(_("Not using sqlalchemy connection pool."))
    use_pool = False

try:
    from numpy import var
    use_numpy = True
except ImportError:
    log.info(_("Not using numpy to define variance in sqlite."))
    use_numpy = False
# Variance created as sqlite has a bunch of undefined aggregate functions.

class VARIANCE:
    def __init__(self):
        self.store = []

    def step(self, value):
        self.store.append(value)

    def finalize(self):
        return float(var(self.store))

class sqlitemath:
    # helper exposing mod() for sqlite's create_function in Database.connect()
    def mod(self, a, b):
        return a % b
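# A rough sketch of how sqlite consumes an aggregate class like VARIANCE
# (the registration itself happens in Database.connect() below; this snippet
# is illustrative only): sqlite calls step() once per row and finalize()
# once for the result.
#
#   conn = sqlite3.connect(':memory:')
#   conn.create_aggregate("variance", 1, VARIANCE)
#   conn.execute("create table t(x)")
#   conn.executemany("insert into t values (?)", [(1,), (2,), (3,)])
#   print conn.execute("select variance(x) from t").fetchone()[0]  # ~0.667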
class Database(object):

    # backend codes (see get_backend_name below)
    MYSQL_INNODB = 2
    PGSQL = 3
    SQLITE = 4

    hero_hudstart_def = '1999-12-31'      # default for length of Hero's stats in HUD
    villain_hudstart_def = '1999-12-31'   # default for length of Villain's stats in HUD
    # Data Structures for index and foreign key creation
    # drop_code is an int with possible values:  0 - don't drop for bulk import
    #                                            1 - drop during bulk import
    #
    # - note that mysql automatically creates indexes on constrained columns when
    #   foreign keys are created, while postgres does not. Hence the much longer list
    #   of indexes is required for postgres.
    # all primary keys are left on all the time
    #
    #               table               column              drop_code

    indexes = [
                [ ] # no db with index 0
              , [ ] # no db with index 1
              , [ # indexes for mysql (list index 2) (foreign keys not here, in next data structure)
                #  {'tab':'Players',  'col':'name',          'drop':0}  unique indexes not dropped
                #  {'tab':'Hands',    'col':'siteHandNo',    'drop':0}  unique indexes not dropped
                #, {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0}  unique indexes not dropped
                ]
              , [ # indexes for postgres (list index 3)
                  {'tab':'Gametypes',       'col':'siteId',            'drop':0}
                , {'tab':'Hands',           'col':'gametypeId',        'drop':0} # mct 22/3/09
                , {'tab':'Hands',           'col':'fileId',            'drop':0} # mct 22/3/09
                #, {'tab':'Hands',          'col':'siteHandNo',        'drop':0}  unique indexes not dropped
                , {'tab':'HandsActions',    'col':'handId',            'drop':1}
                , {'tab':'HandsActions',    'col':'playerId',          'drop':1}
                , {'tab':'HandsActions',    'col':'actionId',          'drop':1}
                , {'tab':'Boards',          'col':'handId',            'drop':1}
                , {'tab':'HandsPlayers',    'col':'handId',            'drop':1}
                , {'tab':'HandsPlayers',    'col':'playerId',          'drop':1}
                , {'tab':'HandsPlayers',    'col':'tourneysPlayersId', 'drop':0}
                , {'tab':'HudCache',        'col':'gametypeId',        'drop':1}
                , {'tab':'HudCache',        'col':'playerId',          'drop':0}
                , {'tab':'HudCache',        'col':'tourneyTypeId',     'drop':0}
                , {'tab':'SessionsCache',   'col':'gametypeId',        'drop':1}
                , {'tab':'SessionsCache',   'col':'playerId',          'drop':0}
                , {'tab':'SessionsCache',   'col':'tourneyTypeId',     'drop':0}
                , {'tab':'Players',         'col':'siteId',            'drop':1}
                #, {'tab':'Players',        'col':'name',              'drop':0}  unique indexes not dropped
                , {'tab':'Tourneys',        'col':'tourneyTypeId',     'drop':1}
                #, {'tab':'Tourneys',       'col':'siteTourneyNo',     'drop':0}  unique indexes not dropped
                , {'tab':'TourneysPlayers', 'col':'playerId',          'drop':0}
                #, {'tab':'TourneysPlayers','col':'tourneyId',         'drop':0}  unique indexes not dropped
                , {'tab':'TourneyTypes',    'col':'siteId',            'drop':0}
                , {'tab':'Backings',        'col':'tourneysPlayersId', 'drop':0}
                , {'tab':'Backings',        'col':'playerId',          'drop':0}
                , {'tab':'RawHands',        'col':'id',                'drop':0}
                , {'tab':'RawTourneys',     'col':'id',                'drop':0}
                ]
              , [ # indexes for sqlite (list index 4)
                  {'tab':'Hands',           'col':'gametypeId',        'drop':0}
                , {'tab':'Hands',           'col':'fileId',            'drop':0}
                , {'tab':'Boards',          'col':'handId',            'drop':0}
                , {'tab':'HandsPlayers',    'col':'handId',            'drop':0}
                , {'tab':'HandsPlayers',    'col':'playerId',          'drop':0}
                , {'tab':'HandsPlayers',    'col':'tourneysPlayersId', 'drop':0}
                , {'tab':'HandsActions',    'col':'handId',            'drop':0}
                , {'tab':'HandsActions',    'col':'playerId',          'drop':0}
                , {'tab':'HandsActions',    'col':'actionId',          'drop':1}
                , {'tab':'HudCache',        'col':'gametypeId',        'drop':1}
                , {'tab':'HudCache',        'col':'playerId',          'drop':0}
                , {'tab':'HudCache',        'col':'tourneyTypeId',     'drop':0}
                , {'tab':'SessionsCache',   'col':'gametypeId',        'drop':1}
                , {'tab':'SessionsCache',   'col':'playerId',          'drop':0}
                , {'tab':'SessionsCache',   'col':'tourneyTypeId',     'drop':0}
                , {'tab':'Players',         'col':'siteId',            'drop':1}
                , {'tab':'Tourneys',        'col':'tourneyTypeId',     'drop':1}
                , {'tab':'TourneysPlayers', 'col':'playerId',          'drop':0}
                , {'tab':'TourneyTypes',    'col':'siteId',            'drop':0}
                , {'tab':'Backings',        'col':'tourneysPlayersId', 'drop':0}
                , {'tab':'Backings',        'col':'playerId',          'drop':0}
                , {'tab':'RawHands',        'col':'id',                'drop':0}
                , {'tab':'RawTourneys',     'col':'id',                'drop':0}
                ]
              ]
    foreignKeys = [
                    [ ] # no db with index 0
                  , [ ] # no db with index 1
                  , [ # foreign keys for mysql (index 2)
                      {'fktab':'Hands',         'fkcol':'gametypeId',        'rtab':'Gametypes',       'rcol':'id', 'drop':1}
                    , {'fktab':'Hands',         'fkcol':'fileId',            'rtab':'Files',           'rcol':'id', 'drop':1}
                    , {'fktab':'Boards',        'fkcol':'handId',            'rtab':'Hands',           'rcol':'id', 'drop':1}
                    , {'fktab':'HandsPlayers',  'fkcol':'handId',            'rtab':'Hands',           'rcol':'id', 'drop':1}
                    , {'fktab':'HandsPlayers',  'fkcol':'playerId',          'rtab':'Players',         'rcol':'id', 'drop':1}
                    , {'fktab':'HandsPlayers',  'fkcol':'tourneysPlayersId', 'rtab':'TourneysPlayers', 'rcol':'id', 'drop':1}
                    , {'fktab':'HandsActions',  'fkcol':'handId',            'rtab':'Hands',           'rcol':'id', 'drop':1}
                    , {'fktab':'HandsActions',  'fkcol':'playerId',          'rtab':'Players',         'rcol':'id', 'drop':1}
                    , {'fktab':'HandsActions',  'fkcol':'actionId',          'rtab':'Actions',         'rcol':'id', 'drop':1}
                    , {'fktab':'HudCache',      'fkcol':'gametypeId',        'rtab':'Gametypes',       'rcol':'id', 'drop':1}
                    , {'fktab':'HudCache',      'fkcol':'playerId',          'rtab':'Players',         'rcol':'id', 'drop':0}
                    , {'fktab':'HudCache',      'fkcol':'tourneyTypeId',     'rtab':'TourneyTypes',    'rcol':'id', 'drop':1}
                    , {'fktab':'SessionsCache', 'fkcol':'gametypeId',        'rtab':'Gametypes',       'rcol':'id', 'drop':1}
                    , {'fktab':'SessionsCache', 'fkcol':'playerId',          'rtab':'Players',         'rcol':'id', 'drop':0}
                    , {'fktab':'SessionsCache', 'fkcol':'tourneyTypeId',     'rtab':'TourneyTypes',    'rcol':'id', 'drop':1}
                    ]
                  , [ # foreign keys for postgres (index 3)
                      {'fktab':'Hands',         'fkcol':'gametypeId',        'rtab':'Gametypes',       'rcol':'id', 'drop':1}
                    , {'fktab':'Hands',         'fkcol':'fileId',            'rtab':'Files',           'rcol':'id', 'drop':1}
                    , {'fktab':'Boards',        'fkcol':'handId',            'rtab':'Hands',           'rcol':'id', 'drop':1}
                    , {'fktab':'HandsPlayers',  'fkcol':'handId',            'rtab':'Hands',           'rcol':'id', 'drop':1}
                    , {'fktab':'HandsPlayers',  'fkcol':'playerId',          'rtab':'Players',         'rcol':'id', 'drop':1}
                    , {'fktab':'HandsActions',  'fkcol':'handId',            'rtab':'Hands',           'rcol':'id', 'drop':1}
                    , {'fktab':'HandsActions',  'fkcol':'playerId',          'rtab':'Players',         'rcol':'id', 'drop':1}
                    , {'fktab':'HandsActions',  'fkcol':'actionId',          'rtab':'Actions',         'rcol':'id', 'drop':1}
                    , {'fktab':'HudCache',      'fkcol':'gametypeId',        'rtab':'Gametypes',       'rcol':'id', 'drop':1}
                    , {'fktab':'HudCache',      'fkcol':'playerId',          'rtab':'Players',         'rcol':'id', 'drop':0}
                    , {'fktab':'HudCache',      'fkcol':'tourneyTypeId',     'rtab':'TourneyTypes',    'rcol':'id', 'drop':1}
                    , {'fktab':'SessionsCache', 'fkcol':'gametypeId',        'rtab':'Gametypes',       'rcol':'id', 'drop':1}
                    , {'fktab':'SessionsCache', 'fkcol':'playerId',          'rtab':'Players',         'rcol':'id', 'drop':0}
                    , {'fktab':'SessionsCache', 'fkcol':'tourneyTypeId',     'rtab':'TourneyTypes',    'rcol':'id', 'drop':1}
                    ]
                  , [ # no foreign keys in sqlite (index 4)
                    ]
                  ]
    # MySQL Notes:
    #    "FOREIGN KEY (handId) REFERENCES Hands(id)" - requires index on Hands.id
    #                                                - creates index handId on <thistable>.handId
    # alter table t drop foreign key fk
    # alter table t add foreign key (fkcol) references tab(rcol)
    # alter table t add constraint c foreign key (fkcol) references tab(rcol)
    # (fkcol is used for foreign key name)

    # mysql to list indexes: (CG - "LIST INDEXES" should work too)
    #   SELECT table_name, index_name, non_unique, column_name
    #   FROM INFORMATION_SCHEMA.STATISTICS
    #   WHERE table_name = 'tbl_name'
    #   AND table_schema = 'db_name'
    #   ORDER BY table_name, index_name, seq_in_index
    #
    # ALTER TABLE Tourneys ADD INDEX siteTourneyNo(siteTourneyNo)
    # ALTER TABLE tab DROP INDEX idx

    # mysql to list foreign keys:
    #   SELECT constraint_name, table_name, column_name, referenced_table_name, referenced_column_name
    #   FROM information_schema.KEY_COLUMN_USAGE
    #   WHERE REFERENCED_TABLE_SCHEMA = (your schema name here)
    #   AND REFERENCED_TABLE_NAME is not null
    #   ORDER BY TABLE_NAME, COLUMN_NAME;

    # this may indicate missing object
    # _mysql_exceptions.OperationalError: (1025, "Error on rename of '.\\fpdb\\hands' to '.\\fpdb\\#sql2-7f0-1b' (errno: 152)")

    # PG notes:
    #  To add a foreign key constraint to a table:
    #  ALTER TABLE tab ADD CONSTRAINT c FOREIGN KEY (col) REFERENCES t2(col2) MATCH FULL;
    #  ALTER TABLE tab DROP CONSTRAINT zipchk
    #
    #  Note: index names must be unique across a schema
    #  CREATE INDEX idx ON tab(col)
    #  SELECT * FROM PG_INDEXES

    # SQLite notes:
    #  create index indexname on tablename (col);
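    # For orientation, a sketch of how the data structures above are consumed
    # (see createAllIndexes/prepareBulkImport below). The backend number picks
    # the sub-list, and entries with 'drop':1 are the ones dropped before a
    # bulk import and recreated afterwards:
    #
    #   for idx in Database.indexes[3]:     # 3 = postgres
    #       print "create index %s_%s_idx on %s(%s)" % \
    #             (idx['tab'], idx['col'], idx['tab'], idx['col'])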
    def __init__(self, c, sql=None, autoconnect=True):
        #log = Configuration.get_logger("logging.conf", "db", log_dir=c.dir_log)
        log.debug(_("Creating Database instance, sql = %s") % sql)
        self.config = c
        self.__connected = False
        self.settings = {}
        self.settings['os'] = "linuxmac" if os.name != "nt" else "windows"
        db_params = c.get_db_parameters()
        self.import_options = c.get_import_parameters()
        self.backend = db_params['db-backend']
        self.db_server = db_params['db-server']
        self.database = db_params['db-databaseName']
        self.host = db_params['db-host']

        gen = c.get_general_params()
        self.day_start = 0
        self._has_lock = False

        if 'day_start' in gen:
            self.day_start = float(gen['day_start'])

        self.sessionTimeout = float(self.import_options['sessionTimeout'])

        # where possible avoid creating new SQL instance by using the global one passed in
        if sql is None:
            self.sql = SQL.Sql(db_server=self.db_server)
        else:
            self.sql = sql

        if autoconnect:
            # connect to db
            self.do_connect(c)

            if self.backend == self.PGSQL:
                from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT, ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_SERIALIZABLE
                #ISOLATION_LEVEL_AUTOCOMMIT     = 0
                #ISOLATION_LEVEL_READ_COMMITTED = 1
                #ISOLATION_LEVEL_SERIALIZABLE   = 2

            if self.backend == self.SQLITE and self.database == ':memory:' and self.wrongDbVersion and self.is_connected():
                log.info("sqlite/:memory: - creating")
                self.recreate_tables()
                self.wrongDbVersion = False

            self.gtcache   = None   # GameTypeId cache
            self.ttcache   = None   # TourneyTypeId cache
            self.tcache    = None   # TourneyId cache
            self.pcache    = None   # PlayerId cache
            self.tpcache   = None   # TourneysPlayersId cache
            self.cachemiss = 0      # Delete me later - using to count player cache misses
            self.cachehit  = 0      # Delete me later - using to count player cache hits

            # config while trying out new hudcache mechanism
            self.use_date_in_hudcache = True

            #self.hud_hero_style = 'T'  # Duplicate set of vars just for hero - not used yet.
            #self.hud_hero_hands = 2000 # Idea is that you might want all-time stats for others
            #self.hud_hero_days  = 30   # but last T days or last H hands for yourself

            # vars for hand ids or dates fetched according to above config:
            self.hand_1day_ago = 0              # max hand id more than 24 hrs earlier than now
            self.date_ndays_ago = 'd000000'     # date N days ago ('d' + YYMMDD)
            self.h_date_ndays_ago = 'd000000'   # date N days ago ('d' + YYMMDD) for hero
            self.date_nhands_ago = {}           # dates N hands ago per player - not used yet

            self.saveActions = False if self.import_options['saveActions'] == False else True

            if self.is_connected():
                if not self.wrongDbVersion:
                    self.get_sites()
                self.connection.rollback()  # make sure any locks taken so far are released
    #end def __init__
    def dumpDatabase(self):
        result = "fpdb database dump\nDB version=" + str(DB_VERSION) + "\n\n"

        tables = self.cursor.execute(self.sql.query['list_tables'])
        tables = self.cursor.fetchall()
        for table in (u'Actions', u'Autorates', u'Backings', u'Gametypes', u'Hands', u'Boards', u'HandsActions', u'HandsPlayers', u'Files', u'HudCache', u'SessionsCache', u'Players', u'RawHands', u'RawTourneys', u'Settings', u'Sites', u'TourneyTypes', u'Tourneys', u'TourneysPlayers'):
            print "table:", table
            result += "###################\nTable " + table + "\n###################\n"
            rows = self.cursor.execute(self.sql.query['get' + table])
            rows = self.cursor.fetchall()
            columnNames = self.cursor.description
            if not rows:
                result += "empty table\n"
            else:
                for row in rows:
                    for columnNumber in range(len(columnNames)):
                        if columnNames[columnNumber][0] in ("importTime", "styleKey"):
                            result += ("  " + columnNames[columnNumber][0] + "=ignore\n")
                        else:
                            result += ("  " + columnNames[columnNumber][0] + "=" + str(row[columnNumber]) + "\n")
                    result += "\n"
            result += "\n"
        return result
    #end def dumpDatabase
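    # Illustrative shape of the dump produced above (values made up):
    #
    #   fpdb database dump
    #   DB version=<DB_VERSION>
    #
    #   ###################
    #   Table Actions
    #   ###################
    #     id=1
    #     name=ante
    #     code=A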
    # could be used by hud to change hud style
    def set_hud_style(self, style):
        self.hud_style = style

    def do_connect(self, c):
        if c is None:
            raise FpdbError('Configuration not defined')

        db = c.get_db_parameters()
        try:
            self.connect(backend=db['db-backend'],
                         host=db['db-host'],
                         database=db['db-databaseName'],
                         user=db['db-user'],
                         password=db['db-password'])
        except:
            # error during connect
            self.__connected = False
            raise

        db_params = c.get_db_parameters()
        self.import_options = c.get_import_parameters()
        self.backend = db_params['db-backend']
        self.db_server = db_params['db-server']
        self.database = db_params['db-databaseName']
        self.host = db_params['db-host']
    def connect(self, backend=None, host=None, database=None,
                user=None, password=None, create=False):
        """Connects a database with the given parameters"""
        if backend is None:
            raise FpdbError('Database backend not defined')
        self.backend = backend
        self.host = host
        self.user = user
        self.password = password
        self.database = database
        self.connection = None
        self.cursor = None

        if backend == Database.MYSQL_INNODB:
            import MySQLdb
            if use_pool:
                MySQLdb = pool.manage(MySQLdb, pool_size=5)
            try:
                self.connection = MySQLdb.connect(host=host
                                                 ,user=user
                                                 ,passwd=password
                                                 ,db=database)
                self.__connected = True
            #TODO: Add port option
            except MySQLdb.Error, ex:
                if ex.args[0] == 1045:
                    raise FpdbMySQLAccessDenied(ex.args[0], ex.args[1])
                elif ex.args[0] == 2002 or ex.args[0] == 2003: # 2002 is no unix socket, 2003 is no tcp socket
                    raise FpdbMySQLNoDatabase(ex.args[0], ex.args[1])
                else:
                    print _("*** WARNING UNKNOWN MYSQL ERROR:"), ex
        elif backend == Database.PGSQL:
            import psycopg2
            import psycopg2.extensions
            if use_pool:
                psycopg2 = pool.manage(psycopg2, pool_size=5)
            psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
            # If DB connection is made over TCP, then the variables
            # host, user and password are required
            # For local domain-socket connections, only DB name is
            # needed, and everything else is in fact undefined and/or
            # flat out wrong
            # sqlcoder: This database only connect failed in my windows setup??
            # Modifed it to try the 4 parameter style if the first connect fails - does this work everywhere?
            self.__connected = False
            if self.host == "localhost" or self.host == "127.0.0.1":
                try:
                    self.connection = psycopg2.connect(database = database)
                    self.__connected = True
                except:
                    # direct connection failed so try user/pass/... version
                    pass
            if not self.is_connected():
                try:
                    self.connection = psycopg2.connect(host = host,
                                                       user = user,
                                                       password = password,
                                                       database = database)
                    self.__connected = True
                except Exception, ex:
                    if 'Connection refused' in ex.args[0]:
                        # meaning eg. db not running
                        raise FpdbPostgresqlNoDatabase(errmsg = ex.args[0])
                    elif 'password authentication' in ex.args[0]:
                        raise FpdbPostgresqlAccessDenied(errmsg = ex.args[0])
                    else:
                        raise FpdbError(ex.args[0])
        elif backend == Database.SQLITE:
            import sqlite3
            if use_pool:
                sqlite3 = pool.manage(sqlite3, pool_size=1)
            #else:
            #    log.warning("SQLite won't work well without 'sqlalchemy' installed.")

            if database != ":memory:":
                if not os.path.isdir(self.config.dir_database) and create:
                    print _("Creating directory: '%s'") % (self.config.dir_database)
                    log.info(_("Creating directory: '%s'") % (self.config.dir_database))
                    os.mkdir(self.config.dir_database)
                database = os.path.join(self.config.dir_database, database)
            self.db_path = database
            log.info(_("Connecting to SQLite: %s") % self.db_path)
            if os.path.exists(database) or create:
                self.connection = sqlite3.connect(self.db_path, detect_types=sqlite3.PARSE_DECLTYPES)
                self.__connected = True
                sqlite3.register_converter("bool", lambda x: bool(int(x)))
                sqlite3.register_adapter(bool, lambda x: 1 if x else 0)
                self.connection.create_function("floor", 1, math.floor)
                tmp = sqlitemath()
                self.connection.create_function("mod", 2, tmp.mod)
                if use_numpy:
                    self.connection.create_aggregate("variance", 1, VARIANCE)
                else:
                    log.warning(_("Some database functions will not work without NumPy support"))
                self.cursor = self.connection.cursor()
                self.cursor.execute('PRAGMA temp_store=2')     # use memory for temp tables/indexes
                self.cursor.execute('PRAGMA journal_mode=WAL') # use write-ahead logging
                self.cursor.execute('PRAGMA synchronous=0')    # don't wait for file writes to finish
            else:
                raise FpdbError("sqlite database " + database + " does not exist")
        else:
            raise FpdbError("unrecognised database backend:" + str(backend))

        if self.is_connected():
            self.cursor = self.connection.cursor()
            self.cursor.execute(self.sql.query['set tx level'])
            self.check_version(database=database, create=create)

    def get_sites(self):
        self.cursor.execute("SELECT name,id FROM Sites")
        sites = self.cursor.fetchall()
        self.config.set_site_ids(sites)
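    # Note on the sqlite bool handling registered in connect() above: with
    # detect_types=PARSE_DECLTYPES, columns declared as "bool" round-trip
    # through the converter/adapter pair, e.g. (illustrative only):
    #
    #   cur.execute("create table t(flag bool)")
    #   cur.execute("insert into t values (?)", (True,))  # adapter stores 1
    #   cur.execute("select flag from t").fetchone()[0]   # converter returns True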
    def check_version(self, database, create):
        self.wrongDbVersion = False
        try:
            self.cursor.execute("SELECT * FROM Settings")
            settings = self.cursor.fetchone()
            if settings[0] != DB_VERSION:
                log.error((_("Outdated or too new database version (%s).") % (settings[0])) + " " + _("Please recreate tables."))
                self.wrongDbVersion = True
        except: # _mysql_exceptions.ProgrammingError:
            if database != ":memory:":
                if create:
                    #print (_("Failed to read settings table.") + " - " + _("Recreating tables."))
                    log.info(_("Failed to read settings table.") + " - " + _("Recreating tables."))
                    self.recreate_tables()
                    self.check_version(database=database, create=False)
                else:
                    #print (_("Failed to read settings table.") + " - " + _("Please recreate tables."))
                    log.info(_("Failed to read settings table.") + " - " + _("Please recreate tables."))
                    self.wrongDbVersion = True
            else:
                self.wrongDbVersion = True
    #end def check_version
    def commit(self):
        if self.backend != self.SQLITE:
            self.connection.commit()
        else:
            # sqlite commits can fail because of shared locks on the database (SQLITE_BUSY)
            # re-try commit if it fails in case this happened
            maxtimes = 5
            pause = 1
            ok = False
            for i in xrange(maxtimes):
                try:
                    ret = self.connection.commit()
                    #log.debug(_("commit finished ok, i = ")+str(i))
                    ok = True
                except:
                    log.debug(_("commit %s failed: info=%s value=%s") % (str(i), str(sys.exc_info()), str(sys.exc_value)))
                    sleep(pause)
                if ok:
                    break
            if not ok:
                log.debug(_("commit failed"))
                raise FpdbError('sqlite commit failed')

    def rollback(self):
        self.connection.rollback()
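    # Usage note for the retry loop in commit() above: with maxtimes=5 and
    # pause=1, a concurrent writer holding the sqlite lock gets roughly five
    # seconds to finish before FpdbError is raised; sqlite reports that
    # situation as SQLITE_BUSY ("database is locked").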
    def connected(self):
        """ now deprecated, use is_connected() instead """
        return self.__connected

    def is_connected(self):
        return self.__connected

    def get_cursor(self):
        return self.connection.cursor()

    def close_connection(self):
        self.connection.close()

    def disconnect(self, due_to_error=False):
        """Disconnects the DB (rolls back if param is true, otherwise commits)"""
        if due_to_error:
            self.connection.rollback()
        else:
            self.connection.commit()
        self.connection.close()
        self.__connected = False
    def reconnect(self, due_to_error=False):
        """Reconnects the DB"""
        #print "started reconnect"
        self.disconnect(due_to_error)
        self.connect(self.backend, self.host, self.database, self.user, self.password)

    def get_backend_name(self):
        """Returns the name of the currently used backend"""
        if self.backend == 2:
            return "MySQL InnoDB"
        elif self.backend == 3:
            return "PostgreSQL"
        elif self.backend == 4:
            return "SQLite"
        else:
            raise FpdbError("invalid backend")
    def get_db_info(self):
        return (self.host, self.database, self.user, self.password)

    def get_table_name(self, hand_id):
        c = self.connection.cursor()
        c.execute(self.sql.query['get_table_name'], (hand_id, ))
        row = c.fetchone()
        return row

    def get_table_info(self, hand_id):
        c = self.connection.cursor()
        c.execute(self.sql.query['get_table_name'], (hand_id, ))
        row = c.fetchone()
        l = list(row)
        if row[3] == "ring":   # cash game
            l.append(None)
            l.append(None)
            return l
        else:                  # tournament
            tour_no, tab_no = re.split(" ", row[0])
            l.append(tour_no)
            l.append(tab_no)
            return l
    def get_last_hand(self):
        c = self.connection.cursor()
        c.execute(self.sql.query['get_last_hand'])
        row = c.fetchone()
        return row[0]

    def get_xml(self, hand_id):
        c = self.connection.cursor()
        c.execute(self.sql.query['get_xml'], (hand_id,))
        row = c.fetchone()
        return row[0]

    def get_recent_hands(self, last_hand):
        c = self.connection.cursor()
        c.execute(self.sql.query['get_recent_hands'], {'last_hand': last_hand})
        return c.fetchall()

    def get_hand_info(self, new_hand_id):
        c = self.connection.cursor()
        c.execute(self.sql.query['get_hand_info'], (new_hand_id,))
        return c.fetchall()
    def getHandCount(self):
        c = self.connection.cursor()
        c.execute(self.sql.query['getHandCount'])
        return c.fetchone()[0]
    #end def getHandCount

    def getTourneyCount(self):
        c = self.connection.cursor()
        c.execute(self.sql.query['getTourneyCount'])
        return c.fetchone()[0]
    #end def getTourneyCount

    def getTourneyTypeCount(self):
        c = self.connection.cursor()
        c.execute(self.sql.query['getTourneyTypeCount'])
        return c.fetchone()[0]
    #end def getTourneyTypeCount
    def getSiteTourneyNos(self, site):
        c = self.connection.cursor()
        # FIXME: Take site and actually fetch siteId from that
        # Fixed to Winamax atm
        q = self.sql.query['getSiteTourneyNos']
        q = q.replace('%s', self.sql.query['placeholder'])
        c.execute(q, (15,))  # 15 = Winamax in the Sites table filled by fillDefaultData
        alist = []
        for row in c.fetchall():
            alist.append(row)
        return alist
    def get_actual_seat(self, hand_id, name):
        c = self.connection.cursor()
        c.execute(self.sql.query['get_actual_seat'], (hand_id, name))
        row = c.fetchone()
        return row[0]
    def get_cards(self, hand):
        """Get and return the cards for each player in the hand."""
        cards = {} # dict of cards, the key is the seat number,
                   # the value is a tuple of the players cards
                   # example: {1: (0, 0, 20, 21, 22, 0, 0)}
        c = self.connection.cursor()
        c.execute(self.sql.query['get_cards'], [hand])
        for row in c.fetchall():
            cards[row[0]] = row[1:]
        return cards
    def get_common_cards(self, hand):
        """Get and return the community cards for the specified hand."""
        cards = {}
        c = self.connection.cursor()
        c.execute(self.sql.query['get_common_cards'], [hand])
        cards['common'] = c.fetchone()
        return cards
    def get_action_from_hand(self, hand_no):
        action = [ [], [], [], [], [] ]
        c = self.connection.cursor()
        c.execute(self.sql.query['get_action_from_hand'], (hand_no,))
        for row in c.fetchall():
            street = row[0]
            act = row[1:]
            action[street].append(act)
        return action
    def get_winners_from_hand(self, hand):
        """Returns a hash of winners:amount won, given a hand number."""
        winners = {}
        c = self.connection.cursor()
        c.execute(self.sql.query['get_winners_from_hand'], (hand,))
        for row in c.fetchall():
            winners[row[0]] = row[1]
        return winners
    def init_hud_stat_vars(self, hud_days, h_hud_days):
        """Initialise variables used by Hud to fetch stats:
           self.hand_1day_ago     handId of latest hand played more than a day ago
           self.date_ndays_ago    date n days ago
           self.h_date_ndays_ago  date n days ago for hero (different n)
        """
        self.hand_1day_ago = 1
        try:
            c = self.get_cursor()
            c.execute(self.sql.query['get_hand_1day_ago'])
            row = c.fetchone()
        except: # TODO: what error is a database error?!
            err = traceback.extract_tb(sys.exc_info()[2])[-1]
            print _("*** Database Error: ") + err[2] + "(" + str(err[1]) + "): " + str(sys.exc_info()[1])
        else:
            if row and row[0]:
                self.hand_1day_ago = int(row[0])

        tz = datetime.utcnow() - datetime.today()
        tz_offset = tz.seconds / 3600
        tz_day_start_offset = self.day_start + tz_offset

        d = timedelta(days=hud_days, hours=tz_day_start_offset)
        now = datetime.utcnow() - d
        self.date_ndays_ago = "d%02d%02d%02d" % (now.year - 2000, now.month, now.day)

        d = timedelta(days=h_hud_days, hours=tz_day_start_offset)
        now = datetime.utcnow() - d
        self.h_date_ndays_ago = "d%02d%02d%02d" % (now.year - 2000, now.month, now.day)
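    # Worked example of the stylekey format built above: with hud_days=30 and
    # a current UTC date of 2011-06-05 (ignoring the timezone/day-start offset
    # for simplicity), now = 2011-05-06 and date_ndays_ago = 'd110506'.
    # Because styleKey values are stored as 'd' + YYMMDD, a plain string
    # comparison against this cutoff selects the hands of the last N days.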
    def init_player_hud_stat_vars(self, playerid):
        # not sure if this is workable, to be continued ...
        try:
            # self.date_nhands_ago is used for fetching stats for last n hands (hud_style = 'H')
            # This option not used yet - needs to be called for each player :-(
            self.date_nhands_ago[str(playerid)] = 'd000000'

            # should use aggregated version of query if appropriate
            c = self.get_cursor()
            c.execute(self.sql.query['get_date_nhands_ago'], (self.hud_hands, playerid))
            row = c.fetchone()
            if row and row[0]:
                self.date_nhands_ago[str(playerid)] = row[0]
            print _("Database: n hands ago the date was:") + " " + self.date_nhands_ago[str(playerid)] + " (playerid " + str(playerid) + ")"
        except:
            err = traceback.extract_tb(sys.exc_info()[2])[-1]
            print _("*** Database Error: ") + err[2] + "(" + str(err[1]) + "): " + str(sys.exc_info()[1])
    # is get_stats_from_hand slow?
    def get_stats_from_hand( self, hand, type   # type is "ring" or "tour"
                           , hud_params = {'hud_style':'A', 'agg_bb_mult':1000
                                          ,'seats_style':'A', 'seats_cust_nums':['n/a', 'n/a', (2,2), (3,4), (3,5), (4,6), (5,7), (6,8), (7,9), (8,10), (8,10)]
                                          ,'h_hud_style':'S', 'h_agg_bb_mult':1000
                                          ,'h_seats_style':'A', 'h_seats_cust_nums':['n/a', 'n/a', (2,2), (3,4), (3,5), (4,6), (5,7), (6,8), (7,9), (8,10), (8,10)]
                                          }
                           , hero_id = -1
                           , num_seats = 6
                           ):
        hud_style   = hud_params['hud_style']
        agg_bb_mult = hud_params['agg_bb_mult']
        seats_style = hud_params['seats_style']
        seats_cust_nums = hud_params['seats_cust_nums']
        h_hud_style   = hud_params['h_hud_style']
        h_agg_bb_mult = hud_params['h_agg_bb_mult']
        h_seats_style = hud_params['h_seats_style']
        h_seats_cust_nums = hud_params['h_seats_cust_nums']

        stat_dict = {}

        if seats_style == 'A':
            seats_min, seats_max = 0, 10
        elif seats_style == 'C':
            seats_min, seats_max = seats_cust_nums[num_seats][0], seats_cust_nums[num_seats][1]
        elif seats_style == 'E':
            seats_min, seats_max = num_seats, num_seats
        else:
            seats_min, seats_max = 0, 10
            print "bad seats_style value:", seats_style

        if h_seats_style == 'A':
            h_seats_min, h_seats_max = 0, 10
        elif h_seats_style == 'C':
            h_seats_min, h_seats_max = h_seats_cust_nums[num_seats][0], h_seats_cust_nums[num_seats][1]
        elif h_seats_style == 'E':
            h_seats_min, h_seats_max = num_seats, num_seats
        else:
            h_seats_min, h_seats_max = 0, 10
            print "bad h_seats_style value:", h_seats_style

        log.info("opp seats style %s %d %d hero seats style %s %d %d"
                 % (seats_style, seats_min, seats_max
                   ,h_seats_style, h_seats_min, h_seats_max) )

        if hud_style == 'S' or h_hud_style == 'S':
            self.get_stats_from_hand_session(hand, stat_dict, hero_id
                                            ,hud_style, seats_min, seats_max
                                            ,h_hud_style, h_seats_min, h_seats_max)

            if hud_style == 'S' and h_hud_style == 'S':
                return stat_dict

        if hud_style == 'T':
            stylekey = self.date_ndays_ago
        elif hud_style == 'A':
            stylekey = '0000000'  # all stylekey values should be higher than this
        elif hud_style == 'S':
            stylekey = 'zzzzzzz'  # all stylekey values should be lower than this
        else:
            stylekey = '0000000'
            log.info('hud_style: %s' % hud_style)

        #elif hud_style == 'H':
        #    stylekey = date_nhands_ago  needs array by player here ...

        if h_hud_style == 'T':
            h_stylekey = self.h_date_ndays_ago
        elif h_hud_style == 'A':
            h_stylekey = '0000000'  # all stylekey values should be higher than this
        elif h_hud_style == 'S':
            h_stylekey = 'zzzzzzz'  # all stylekey values should be lower than this
        else:
            h_stylekey = '00000000'
            log.info('h_hud_style: %s' % h_hud_style)

        #elif h_hud_style == 'H':
        #    h_stylekey = date_nhands_ago  needs array by player here ...

        query = 'get_stats_from_hand_aggregated'
        subs = (hand
               ,hero_id, stylekey, agg_bb_mult, agg_bb_mult, seats_min, seats_max             # opponent params
               ,hero_id, h_stylekey, h_agg_bb_mult, h_agg_bb_mult, h_seats_min, h_seats_max)  # hero params

        #print "get stats: hud style =", hud_style, "query =", query, "subs =", subs
        c = self.connection.cursor()

        # now get the stats
        c.execute(self.sql.query[query], subs)
        #for row in c.fetchall():   # needs "explain query plan" in sql statement
        #    print "query plan: ", row
        colnames = [desc[0] for desc in c.description]
        for row in c.fetchall():
            playerid = row[0]
            if (playerid == hero_id and h_hud_style != 'S') or (playerid != hero_id and hud_style != 'S'):
                t_dict = {}
                for name, val in zip(colnames, row):
                    t_dict[name.lower()] = val
                stat_dict[t_dict['player_id']] = t_dict

        return stat_dict
875 def get_stats_from_hand_session(self
, hand
, stat_dict
, hero_id
876 ,hud_style
, seats_min
, seats_max
877 ,h_hud_style
, h_seats_min
, h_seats_max
):
878 """Get stats for just this session (currently defined as any play in the last 24 hours - to
879 be improved at some point ...)
880 h_hud_style and hud_style params indicate whether to get stats for hero and/or others
881 - only fetch heros stats if h_hud_style == 'S',
882 and only fetch others stats if hud_style == 'S'
883 seats_min/max params give seats limits, only include stats if between these values
886 query
= self
.sql
.query
['get_stats_from_hand_session']
887 if self
.db_server
== 'mysql':
888 query
= query
.replace("<signed>", 'signed ')
890 query
= query
.replace("<signed>", '')
892 subs
= (self
.hand_1day_ago
, hand
, hero_id
, seats_min
, seats_max
893 , hero_id
, h_seats_min
, h_seats_max
)
894 c
= self
.get_cursor()
897 #print "sess_stats: subs =", subs, "subs[0] =", subs[0]
898 c
.execute(query
, subs
)
899 colnames
= [desc
[0] for desc
in c
.description
]
903 if colnames
[0].lower() == 'player_id':
905 # Loop through stats adding them to appropriate stat_dict:
909 if (playerid
== hero_id
and h_hud_style
== 'S') or (playerid
!= hero_id
and hud_style
== 'S'):
910 for name
, val
in zip(colnames
, row
):
911 if not playerid
in stat_dict
:
912 stat_dict
[playerid
] = {}
913 stat_dict
[playerid
][name
.lower()] = val
914 elif not name
.lower() in stat_dict
[playerid
]:
915 stat_dict
[playerid
][name
.lower()] = val
916 elif name
.lower() not in ('hand_id', 'player_id', 'seat', 'screen_name', 'seats'):
917 #print "DEBUG: stat_dict[%s][%s]: %s" %(playerid, name.lower(), val)
918 stat_dict
[playerid
][name
.lower()] += val
920 if n
>= 10000: break # todo: don't think this is needed so set nice and high
921 # prevents infinite loop so leave for now - comment out or remove?
924 log
.error(_("ERROR: query %s result does not have player_id as first column") % (query
,))
926 #print " %d rows fetched, len(stat_dict) = %d" % (n, len(stat_dict))
928 #print "session stat_dict =", stat_dict
    def get_player_id(self, config, siteName, playerName):
        c = self.connection.cursor()
        siteNameUtf = Charset.to_utf8(siteName)
        playerNameUtf = unicode(playerName)
        #print "db.get_player_id siteName",siteName,"playerName",playerName
        c.execute(self.sql.query['get_player_id'], (playerNameUtf, siteNameUtf))
        row = c.fetchone()
        if row:
            return row[0]
        else:
            return None
    def get_player_names(self, config, site_id=None, like_player_name="%"):
        """Fetch player names from players. Use site_id and like_player_name if provided"""
        if site_id is None:
            site_id = -1
        c = self.get_cursor()
        p_name = Charset.to_utf8(like_player_name)
        c.execute(self.sql.query['get_player_names'], (p_name, site_id, site_id))
        rows = c.fetchall()
        return rows
    def get_site_id(self, site):
        c = self.get_cursor()
        c.execute(self.sql.query['getSiteId'], (site,))
        result = c.fetchall()
        return result
    def resetPlayerIDs(self):
        self.pcache = None

    def getSqlPlayerIDs(self, pnames, siteid):
        result = {}
        if self.pcache == None:
            self.pcache = LambdaDict(lambda key: self.insertPlayer(key[0], key[1]))

        for player in pnames:
            result[player] = self.pcache[(player, siteid)]
            # NOTE: Using the LambdaDict does the same thing as:
            #if player in self.pcache:
            #    #print "DEBUG: cachehit"
            #    pass
            #else:
            #    self.pcache[player] = self.insertPlayer(player, siteid)
            #result[player] = self.pcache[player]

        return result
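    # A minimal sketch of the memoising dict that getSqlPlayerIDs assumes
    # (assuming a __missing__-based cache; the real LambdaDict may differ):
    # missing keys are filled by calling the supplied factory (here
    # insertPlayer), so each (player, siteid) pair hits the database at most
    # once.
    #
    #   class LambdaDict(dict):
    #       def __init__(self, l):
    #           super(LambdaDict, self).__init__()
    #           self.l = l
    #       def __missing__(self, key):
    #           self[key] = self.l(key)
    #           return self[key]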
    def insertPlayer(self, name, site_id):
        result = None
        _name = Charset.to_db_utf8(name)
        c = self.get_cursor()
        q = "SELECT id, name FROM Players WHERE siteid=%s and name=%s"
        q = q.replace('%s', self.sql.query['placeholder'])

        #NOTE/FIXME?: MySQL has ON DUPLICATE KEY UPDATE
        #Usage:
        #        INSERT INTO `tags` (`tag`, `count`)
        #         VALUES ($tag, 1)
        #           ON DUPLICATE KEY UPDATE `count`=`count`+1;

        #print "DEBUG: name: %s site: %s" %(name, site_id)

        c.execute(q, (site_id, _name))

        tmp = c.fetchone()
        if tmp == None:  # new player
            c.execute("INSERT INTO Players (name, siteId) VALUES (%s, %s)".replace('%s', self.sql.query['placeholder'])
                      , (_name, site_id))
            #Get last id might be faster here.
            #c.execute ("SELECT id FROM Players WHERE name=%s", (name,))
            result = self.get_last_insert_id(c)
        else:
            result = tmp[0]
        return result
    def get_last_insert_id(self, cursor=None):
        ret = None
        try:
            if self.backend == self.MYSQL_INNODB:
                ret = self.connection.insert_id()
                if ret < 1 or ret > 999999999:
                    log.warning(_("getLastInsertId(): problem fetching insert_id? ret=%d") % ret)
                    ret = -1
            elif self.backend == self.PGSQL:
                # some options:
                # currval(hands_id_seq) - use name of implicit seq here
                # lastval() - still needs sequences set up?
                # insert ... returning  is useful syntax (but postgres specific?)
                # see rules (fancy trigger type things)
                c = self.get_cursor()
                ret = c.execute("SELECT lastval()")
                row = c.fetchone()
                if not row:
                    log.warning(_("getLastInsertId(%s): problem fetching lastval? row=%d") % ("lastval()", row))
                    ret = -1
                else:
                    ret = row[0]
            elif self.backend == self.SQLITE:
                ret = cursor.lastrowid
            else:
                log.error(_("getLastInsertId(): unknown backend: %d") % self.backend)
                ret = -1
        except:
            ret = -1
            err = traceback.extract_tb(sys.exc_info()[2])
            print _("*** Database get_last_insert_id error: ") + str(sys.exc_info()[1])
            print "\n".join([e[0] + ':' + str(e[1]) + " " + e[2] for e in err])
            raise
        return ret
    def prepareBulkImport(self):
        """Drop some indexes/foreign keys to prepare for bulk import.
           Currently keeping the standalone indexes as needed to import quickly"""
        stime = time()
        c = self.get_cursor()
        # sc: don't think autocommit=0 is needed, should already be in that mode
        if self.backend == self.MYSQL_INNODB:
            c.execute("SET foreign_key_checks=0")
            c.execute("SET autocommit=0")

        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(0)   # allow table/index operations to work
        for fk in self.foreignKeys[self.backend]:
            if fk['drop'] == 1:
                if self.backend == self.MYSQL_INNODB:
                    c.execute("SELECT constraint_name " +
                              "FROM information_schema.KEY_COLUMN_USAGE " +
                              #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
                              "WHERE 1=1 " +
                              "AND table_name = %s AND column_name = %s " +
                              "AND referenced_table_name = %s " +
                              "AND referenced_column_name = %s ",
                              (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
                    cons = c.fetchone()
                    #print "preparebulk find fk: cons=", cons
                    if cons:
                        print _("Dropping foreign key:"), cons[0], fk['fktab'], fk['fkcol']
                        try:
                            c.execute("alter table " + fk['fktab'] + " drop foreign key " + cons[0])
                        except:
                            print _("Warning:"), _("Drop foreign key %s_%s_fkey failed: %s, continuing ...") \
                                  % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
                elif self.backend == self.PGSQL:
                    # DON'T FORGET TO RECREATE THEM!!
                    print _("Dropping foreign key:"), fk['fktab'], fk['fkcol']
                    try:
                        # try to lock table to see if index drop will work:
                        # hmmm, tested by commenting out rollback in grapher. lock seems to work but
                        # then drop still hangs :-(  does work in some tests though??
                        # will leave code here for now pending further tests/enhancement ...
                        c.execute("BEGIN TRANSACTION")
                        c.execute( "lock table %s in exclusive mode nowait" % (fk['fktab'],) )
                        #print "after lock, status:", c.statusmessage
                        #print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
                        try:
                            c.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
                            print _("dropped foreign key %s_%s_fkey, continuing ...") % (fk['fktab'], fk['fkcol'])
                        except:
                            if "does not exist" not in str(sys.exc_value):
                                print _("Warning:"), _("Drop foreign key %s_%s_fkey failed: %s, continuing ...") \
                                      % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
                        c.execute("END TRANSACTION")
                    except:
                        print _("Warning:"), _("constraint %s_%s_fkey not dropped: %s, continuing ...") \
                              % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n'))

        for idx in self.indexes[self.backend]:
            if idx['drop'] == 1:
                if self.backend == self.MYSQL_INNODB:
                    print _("Dropping index:"), idx['tab'], idx['col']
                    try:
                        # apparently nowait is not implemented in mysql so this just hangs if there are locks
                        # preventing the index drop :-(
                        c.execute( "alter table %s drop index %s;", (idx['tab'], idx['col']) )
                    except:
                        print _("Drop index failed:"), str(sys.exc_info())
                        # ALTER TABLE `fpdb`.`handsplayers` DROP INDEX `playerId`;
                        # using: 'HandsPlayers' drop index 'playerId'
                elif self.backend == self.PGSQL:
                    # DON'T FORGET TO RECREATE THEM!!
                    print _("Dropping index:"), idx['tab'], idx['col']
                    try:
                        # try to lock table to see if index drop will work:
                        c.execute("BEGIN TRANSACTION")
                        c.execute( "lock table %s in exclusive mode nowait" % (idx['tab'],) )
                        #print "after lock, status:", c.statusmessage
                        try:
                            # table locked ok so index drop should work:
                            #print "drop index %s_%s_idx" % (idx['tab'],idx['col'])
                            c.execute( "drop index if exists %s_%s_idx" % (idx['tab'], idx['col']) )
                            #print "dropped pg index ", idx['tab'], idx['col']
                        except:
                            if "does not exist" not in str(sys.exc_value):
                                print _("Warning:"), _("drop index %s_%s_idx failed: %s, continuing ...") \
                                      % (idx['tab'], idx['col'], str(sys.exc_value).rstrip('\n'))
                        c.execute("END TRANSACTION")
                    except:
                        print _("Warning:"), _("index %s_%s_idx not dropped: %s, continuing ...") \
                              % (idx['tab'], idx['col'], str(sys.exc_value).rstrip('\n'))

        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(1)   # go back to normal isolation level
        self.commit()   # seems to clear up errors if there were any in postgres
        ptime = time() - stime
        print (_("prepare import took %s seconds") % ptime)
    #end def prepareBulkImport
    def afterBulkImport(self):
        """Re-create any dropped indexes/foreign keys after bulk import"""
        stime = time()

        c = self.get_cursor()
        if self.backend == self.MYSQL_INNODB:
            c.execute("SET foreign_key_checks=1")
            c.execute("SET autocommit=1")

        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(0)   # allow table/index operations to work
        for fk in self.foreignKeys[self.backend]:
            if fk['drop'] == 1:
                if self.backend == self.MYSQL_INNODB:
                    c.execute("SELECT constraint_name " +
                              "FROM information_schema.KEY_COLUMN_USAGE " +
                              #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
                              "WHERE 1=1 " +
                              "AND table_name = %s AND column_name = %s " +
                              "AND referenced_table_name = %s " +
                              "AND referenced_column_name = %s ",
                              (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
                    cons = c.fetchone()
                    #print "afterbulk: cons=", cons
                    if cons:
                        pass
                    else:
                        print _("Creating foreign key:"), fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
                        try:
                            c.execute("alter table " + fk['fktab'] + " add foreign key ("
                                      + fk['fkcol'] + ") references " + fk['rtab'] + "("
                                      + fk['rcol'] + ")")
                        except:
                            print _("Create foreign key failed:"), str(sys.exc_info())
                elif self.backend == self.PGSQL:
                    print _("Creating foreign key:"), fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
                    try:
                        c.execute("alter table " + fk['fktab'] + " add constraint "
                                  + fk['fktab'] + '_' + fk['fkcol'] + '_fkey'
                                  + " foreign key (" + fk['fkcol']
                                  + ") references " + fk['rtab'] + "(" + fk['rcol'] + ")")
                    except:
                        print _("Create foreign key failed:"), str(sys.exc_info())

        for idx in self.indexes[self.backend]:
            if idx['drop'] == 1:
                if self.backend == self.MYSQL_INNODB:
                    print _("Creating index %s %s") % (idx['tab'], idx['col'])
                    try:
                        s = "alter table %s add index %s(%s)" % (idx['tab'], idx['col'], idx['col'])
                        c.execute(s)
                    except:
                        print _("Create index failed:"), str(sys.exc_info())
                elif self.backend == self.PGSQL:
                    # mod to use tab_col for index name?
                    print _("Creating index %s %s") % (idx['tab'], idx['col'])
                    try:
                        s = "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
                        c.execute(s)
                    except:
                        print _("Create index failed:"), str(sys.exc_info())

        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(1)   # go back to normal isolation level
        self.commit()   # seems to clear up errors if there were any in postgres
        atime = time() - stime
        print (_("After import took %s seconds") % atime)
    #end def afterBulkImport
    def drop_referential_integrity(self):
        """Update all tables to remove foreign keys"""

        c = self.get_cursor()
        c.execute(self.sql.query['list_tables'])
        result = c.fetchall()

        for i in range(len(result)):
            c.execute("SHOW CREATE TABLE " + result[i][0])
            inner = c.fetchall()

            for j in range(len(inner)):
                # result[i][0] - Table name
                # result[i][1] - CREATE TABLE parameters
                # Searching for CONSTRAINT `tablename_ibfk_1`
                for m in re.finditer('(ibfk_[0-9]+)', inner[j][1]):
                    key = "`" + inner[j][0] + "_" + m.group() + "`"
                    c.execute("ALTER TABLE " + inner[j][0] + " DROP FOREIGN KEY " + key)
                self.commit()
    #end def drop_referential_integrity
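    # Example of what the finditer() above extracts: given SHOW CREATE TABLE
    # output containing a line such as
    #   CONSTRAINT `Hands_ibfk_1` FOREIGN KEY (`gametypeId`) REFERENCES `Gametypes` (`id`)
    # m.group() is 'ibfk_1', so key becomes '`Hands_ibfk_1`' and the ALTER
    # TABLE statement drops that auto-named constraint.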
    def recreate_tables(self):
        """(Re-)creates the tables of the current DB"""

        self.drop_tables()
        self.resetPlayerIDs()
        self.create_tables()
        self.createAllIndexes()
        self.commit()
        self.get_sites()
        #print _("Finished recreating tables")
        log.info(_("Finished recreating tables"))
    #end def recreate_tables
    def create_tables(self):
        #todo: should detect and fail gracefully if tables already exist.
        try:
            log.debug(self.sql.query['createSettingsTable'])
            c = self.get_cursor()
            c.execute(self.sql.query['createSettingsTable'])

            log.debug("Creating tables")
            c.execute(self.sql.query['createActionsTable'])
            c.execute(self.sql.query['createSitesTable'])
            c.execute(self.sql.query['createGametypesTable'])
            c.execute(self.sql.query['createFilesTable'])
            c.execute(self.sql.query['createPlayersTable'])
            c.execute(self.sql.query['createAutoratesTable'])
            c.execute(self.sql.query['createHandsTable'])
            c.execute(self.sql.query['createBoardsTable'])
            c.execute(self.sql.query['createTourneyTypesTable'])
            c.execute(self.sql.query['createTourneysTable'])
            c.execute(self.sql.query['createTourneysPlayersTable'])
            c.execute(self.sql.query['createHandsPlayersTable'])
            c.execute(self.sql.query['createHandsActionsTable'])
            c.execute(self.sql.query['createHudCacheTable'])
            c.execute(self.sql.query['createSessionsCacheTable'])
            c.execute(self.sql.query['createBackingsTable'])
            c.execute(self.sql.query['createRawHands'])
            c.execute(self.sql.query['createRawTourneys'])

            # Create sessionscache indexes
            log.debug("Creating SessionsCache indexes")
            c.execute(self.sql.query['addSessionIdIndex'])
            c.execute(self.sql.query['addHandsSessionIdIndex'])
            c.execute(self.sql.query['addHandsGameSessionIdIndex'])

            # Create unique indexes:
            log.debug("Creating unique indexes")
            c.execute(self.sql.query['addTourneyIndex'])
            c.execute(self.sql.query['addHandsIndex'])
            c.execute(self.sql.query['addPlayersIndex'])
            c.execute(self.sql.query['addTPlayersIndex'])
            c.execute(self.sql.query['addTTypesIndex'])

            self.fillDefaultData()
            self.commit()
        except:
            #print "Error creating tables: ", str(sys.exc_value)
            err = traceback.extract_tb(sys.exc_info()[2])[-1]
            print _("***Error creating tables:"), err[2] + "(" + str(err[1]) + "): " + str(sys.exc_info()[1])
            self.rollback()
            raise
    #end def create_tables
    def drop_tables(self):
        """Drops the fpdb tables from the current db"""
        try:
            c = self.get_cursor()
        except:
            print _("*** Error unable to get database cursor")
        else:
            backend = self.get_backend_name()
            if backend == 'MySQL InnoDB': # what happens if someone is using MyISAM?!
                try:
                    self.drop_referential_integrity() # needed to drop tables with foreign keys
                    c.execute(self.sql.query['list_tables'])
                    tables = c.fetchall()
                    for table in tables:
                        c.execute(self.sql.query['drop_table'] + table[0])
                except:
                    err = traceback.extract_tb(sys.exc_info()[2])[-1]
                    print _("***Error dropping tables:"), err[2] + "(" + str(err[1]) + "): " + str(sys.exc_info()[1])
                    self.rollback()
            elif backend == 'PostgreSQL':
                try:
                    self.commit()
                    c.execute(self.sql.query['list_tables'])
                    tables = c.fetchall()
                    for table in tables:
                        c.execute(self.sql.query['drop_table'] + table[0] + ' cascade')
                except:
                    err = traceback.extract_tb(sys.exc_info()[2])[-1]
                    print _("***Error dropping tables:"), err[2] + "(" + str(err[1]) + "): " + str(sys.exc_info()[1])
                    self.rollback()
            elif backend == 'SQLite':
                try:
                    c.execute(self.sql.query['list_tables'])
                    for table in c.fetchall():
                        log.debug(self.sql.query['drop_table'] + table[0])
                        c.execute(self.sql.query['drop_table'] + table[0])
                except:
                    err = traceback.extract_tb(sys.exc_info()[2])[-1]
                    print _("***Error dropping tables:"), err[2] + "(" + str(err[1]) + "): " + str(sys.exc_info()[1])
                    self.rollback()
            try:
                self.commit()
            except:
                print _("*** Error in committing table drop")
                err = traceback.extract_tb(sys.exc_info()[2])[-1]
                print _("***Error dropping tables:"), err[2] + "(" + str(err[1]) + "): " + str(sys.exc_info()[1])
                self.rollback()
    #end def drop_tables
    def createAllIndexes(self):
        """Create new indexes"""

        try:
            if self.backend == self.PGSQL:
                self.connection.set_isolation_level(0)   # allow table/index operations to work
            for idx in self.indexes[self.backend]:
                if self.backend == self.MYSQL_INNODB:
                    print _("Creating index %s %s") % (idx['tab'], idx['col'])
                    log.debug(_("Creating index %s %s") % (idx['tab'], idx['col']))
                    try:
                        s = "create index %s on %s(%s)" % (idx['col'], idx['tab'], idx['col'])
                        self.get_cursor().execute(s)
                    except:
                        print _("Create index failed:"), str(sys.exc_info())
                elif self.backend == self.PGSQL:
                    # mod to use tab_col for index name?
                    print _("Creating index %s %s") % (idx['tab'], idx['col'])
                    log.debug(_("Creating index %s %s") % (idx['tab'], idx['col']))
                    try:
                        s = "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
                        self.get_cursor().execute(s)
                    except:
                        print _("Create index failed:"), str(sys.exc_info())
                elif self.backend == self.SQLITE:
                    print _("Creating index %s %s") % (idx['tab'], idx['col'])
                    log.debug(_("Creating index %s %s") % (idx['tab'], idx['col']))
                    try:
                        s = "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
                        self.get_cursor().execute(s)
                    except:
                        log.debug(_("Create index failed:") + " " + str(sys.exc_info()))
            if self.backend == self.PGSQL:
                self.connection.set_isolation_level(1)   # go back to normal isolation level
        except:
            print _("Error creating indexes:"), str(sys.exc_value)
            raise FpdbError("Error creating indexes:" + " " + str(sys.exc_value) )
    #end def createAllIndexes
    def dropAllIndexes(self):
        """Drop all standalone indexes (i.e. not including primary keys or foreign keys)
           using list of indexes in indexes data structure"""
        # maybe upgrade to use data dictionary?? (but take care to exclude PK and FK)
        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(0)   # allow table/index operations to work
        for idx in self.indexes[self.backend]:
            if self.backend == self.MYSQL_INNODB:
                print (_("Dropping index:"), idx['tab'], idx['col'])
                try:
                    self.get_cursor().execute( "alter table %s drop index %s"
                                             , (idx['tab'], idx['col']) )
                except:
                    print _("Drop index failed:"), str(sys.exc_info())
            elif self.backend == self.PGSQL:
                print (_("Dropping index:"), idx['tab'], idx['col'])
                # mod to use tab_col for index name?
                try:
                    self.get_cursor().execute( "drop index %s_%s_idx"
                                               % (idx['tab'], idx['col']) )
                except:
                    print (_("Drop index failed:"), str(sys.exc_info()))
            elif self.backend == self.SQLITE:
                print (_("Dropping index:"), idx['tab'], idx['col'])
                try:
                    self.get_cursor().execute( "drop index %s_%s_idx"
                                               % (idx['tab'], idx['col']) )
                except:
                    print _("Drop index failed:"), str(sys.exc_info())
        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(1)   # go back to normal isolation level
    #end def dropAllIndexes
    def createAllForeignKeys(self):
        """Create foreign keys"""

        try:
            if self.backend == self.PGSQL:
                self.connection.set_isolation_level(0)   # allow table/index operations to work
            c = self.get_cursor()
        except:
            print _("set_isolation_level failed:"), str(sys.exc_info())

        for fk in self.foreignKeys[self.backend]:
            if self.backend == self.MYSQL_INNODB:
                c.execute("SELECT constraint_name " +
                          "FROM information_schema.KEY_COLUMN_USAGE " +
                          #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
                          "WHERE 1=1 " +
                          "AND table_name = %s AND column_name = %s " +
                          "AND referenced_table_name = %s " +
                          "AND referenced_column_name = %s ",
                          (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
                cons = c.fetchone()
                #print "afterbulk: cons=", cons
                if cons:
                    pass
                else:
                    print _("Creating foreign key:"), fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
                    try:
                        c.execute("alter table " + fk['fktab'] + " add foreign key ("
                                  + fk['fkcol'] + ") references " + fk['rtab'] + "("
                                  + fk['rcol'] + ")")
                    except:
                        print _("Create foreign key failed:"), str(sys.exc_info())
            elif self.backend == self.PGSQL:
                print _("Creating foreign key:"), fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol']
                try:
                    c.execute("alter table " + fk['fktab'] + " add constraint "
                              + fk['fktab'] + '_' + fk['fkcol'] + '_fkey'
                              + " foreign key (" + fk['fkcol']
                              + ") references " + fk['rtab'] + "(" + fk['rcol'] + ")")
                except:
                    print _("Create foreign key failed:"), str(sys.exc_info())

        try:
            if self.backend == self.PGSQL:
                self.connection.set_isolation_level(1)   # go back to normal isolation level
        except:
            print _("set_isolation_level failed:"), str(sys.exc_info())
    #end def createAllForeignKeys
    def dropAllForeignKeys(self):
        """Drop all foreign keys listed in the foreignKeys data structure"""
        # maybe upgrade to use data dictionary?? (but take care to exclude PK and FK)
        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(0)   # allow table/index operations to work
        c = self.get_cursor()

        for fk in self.foreignKeys[self.backend]:
            if self.backend == self.MYSQL_INNODB:
                c.execute("SELECT constraint_name " +
                          "FROM information_schema.KEY_COLUMN_USAGE " +
                          #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
                          "WHERE 1=1 " +
                          "AND table_name = %s AND column_name = %s " +
                          "AND referenced_table_name = %s " +
                          "AND referenced_column_name = %s ",
                          (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
                cons = c.fetchone()
                #print "preparebulk find fk: cons=", cons
                if cons:
                    print _("Dropping foreign key:"), cons[0], fk['fktab'], fk['fkcol']
                    try:
                        c.execute("alter table " + fk['fktab'] + " drop foreign key " + cons[0])
                    except:
                        print _("Warning:"), _("Drop foreign key %s_%s_fkey failed: %s, continuing ...") \
                              % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
            elif self.backend == self.PGSQL:
                # DON'T FORGET TO RECREATE THEM!!
                print _("Dropping foreign key:"), fk['fktab'], fk['fkcol']
                try:
                    # try to lock table to see if index drop will work:
                    # hmmm, tested by commenting out rollback in grapher. lock seems to work but
                    # then drop still hangs :-(  does work in some tests though??
                    # will leave code here for now pending further tests/enhancement ...
                    c.execute("BEGIN TRANSACTION")
                    c.execute( "lock table %s in exclusive mode nowait" % (fk['fktab'],) )
                    #print "after lock, status:", c.statusmessage
                    #print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
                    try:
                        c.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
                        print _("dropped foreign key %s_%s_fkey, continuing ...") % (fk['fktab'], fk['fkcol'])
                    except:
                        if "does not exist" not in str(sys.exc_value):
                            print _("Warning:"), _("Drop foreign key %s_%s_fkey failed: %s, continuing ...") \
                                  % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n') )
                    c.execute("END TRANSACTION")
                except:
                    print _("Warning:"), _("constraint %s_%s_fkey not dropped: %s, continuing ...") \
                          % (fk['fktab'], fk['fkcol'], str(sys.exc_value).rstrip('\n'))
            #print _("Only MySQL and Postgres supported so far")

        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(1)   # go back to normal isolation level
    #end def dropAllForeignKeys
    def fillDefaultData(self):
        c = self.get_cursor()
        c.execute("INSERT INTO Settings (version) VALUES (%s);" % (DB_VERSION))

        c.execute("INSERT INTO Sites (id,name,code) VALUES ('1', 'Full Tilt Poker', 'FT')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('2', 'PokerStars', 'PS')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('3', 'Everleaf', 'EV')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('4', 'Win2day', 'W2')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('5', 'OnGame', 'OG')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('6', 'UltimateBet', 'UB')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('7', 'Betfair', 'BF')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('8', 'Absolute', 'AB')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('9', 'PartyPoker', 'PP')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('10', 'PacificPoker', 'P8')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('11', 'Partouche', 'PA')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('12', 'Carbon', 'CA')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('13', 'PKR', 'PK')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('14', 'iPoker', 'IP')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('15', 'Winamax', 'WM')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('16', 'Everest', 'EP')")
        c.execute("INSERT INTO Sites (id,name,code) VALUES ('17', 'Cake', 'CK')")

        c.execute("INSERT INTO Actions (id,name,code) VALUES ('1', 'ante', 'A')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('2', 'small blind', 'SB')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('3', 'secondsb', 'SSB')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('4', 'big blind', 'BB')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('5', 'both', 'SBBB')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('6', 'calls', 'C')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('7', 'raises', 'R')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('8', 'bets', 'B')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('9', 'stands pat', 'S')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('10', 'folds', 'F')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('11', 'checks', 'K')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('12', 'discards', 'D')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('13', 'bringin', 'I')")
        c.execute("INSERT INTO Actions (id,name,code) VALUES ('14', 'completes', 'P')")
    #end def fillDefaultData
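    # For illustration only: the Actions rows above are the lookup targets of
    # HandsActions.actionId, so stored actions can be decoded with a join along
    # these lines (a sketch, not a query fpdb itself defines):
    #
    #   SELECT ha.handId, a.name, a.code
    #     FROM HandsActions ha
    #     JOIN Actions a ON (a.id = ha.actionId);
    #
    # Note the INSERTs assume freshly created tables; running fillDefaultData
    # twice would violate the primary keys on Sites.id and Actions.id.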
    def rebuild_indexes(self, start=None):
        self.dropAllIndexes()
        self.createAllIndexes()
        self.dropAllForeignKeys()
        self.createAllForeignKeys()
    #end def rebuild_indexes
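    # Usage sketch (assuming a configured Database instance named db):
    #   db = Database(Configuration.Config())
    #   db.rebuild_indexes()   # drops and recreates indexes plus foreign keys in one call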
    def rebuild_hudcache(self, h_start=None, v_start=None):
        """clears hudcache and rebuilds from the individual handsplayers records"""
        try:
            stime = time()
            # derive list of program owner's player ids
            self.hero = {}                               # name of program owner indexed by site id
            self.hero_ids = {'dummy':-53, 'dummy2':-52}  # playerid of owner indexed by site id
                                                         # keep at least two values in the dict so that
                                                         # the tuple generation below doesn't produce a
                                                         # () or (1,) style tuple
            for site in self.config.get_supported_sites():
                result = self.get_site_id(site)
                if result:
                    site_id = result[0][0]
                    self.hero[site_id] = self.config.supported_sites[site].screen_name
                    p_id = self.get_player_id(self.config, site, self.hero[site_id])
                    if p_id:
                        self.hero_ids[site_id] = int(p_id)

            if h_start is None:
                h_start = self.hero_hudstart_def
            if v_start is None:
                v_start = self.villain_hudstart_def

            if self.hero_ids == {}:
                where = "WHERE hp.tourneysPlayersId IS NULL"
            else:
                where =   "where ((( hp.playerId not in " + str(tuple(self.hero_ids.values())) \
                        + "       and h.startTime > '" + v_start + "')" \
                        + "   or (  hp.playerId in " + str(tuple(self.hero_ids.values())) \
                        + "       and h.startTime > '" + h_start + "'))" \
                        + "   AND hp.tourneysPlayersId IS NULL)"
            rebuild_sql_cash = self.sql.query['rebuildHudCache'].replace('<tourney_insert_clause>', "")
            rebuild_sql_cash = rebuild_sql_cash.replace('<tourney_select_clause>', "")
            rebuild_sql_cash = rebuild_sql_cash.replace('<tourney_join_clause>', "")
            rebuild_sql_cash = rebuild_sql_cash.replace('<tourney_group_clause>', "")
            rebuild_sql_cash = rebuild_sql_cash.replace('<where_clause>', where)
            #print "rebuild_sql_cash:",rebuild_sql_cash
            self.get_cursor().execute(self.sql.query['clearHudCache'])
            self.get_cursor().execute(rebuild_sql_cash)

            if self.hero_ids == {}:
                where = "WHERE hp.tourneysPlayersId >= 0"
            else:
                where =   "where ((( hp.playerId not in " + str(tuple(self.hero_ids.values())) \
                        + "       and h.startTime > '" + v_start + "')" \
                        + "   or (  hp.playerId in " + str(tuple(self.hero_ids.values())) \
                        + "       and h.startTime > '" + h_start + "'))" \
                        + "   AND hp.tourneysPlayersId >= 0)"
            rebuild_sql_tourney = self.sql.query['rebuildHudCache'].replace('<tourney_insert_clause>', ",tourneyTypeId")
            rebuild_sql_tourney = rebuild_sql_tourney.replace('<tourney_select_clause>', ",t.tourneyTypeId")
            rebuild_sql_tourney = rebuild_sql_tourney.replace('<tourney_join_clause>', """INNER JOIN TourneysPlayers tp ON (tp.id = hp.tourneysPlayersId)
                INNER JOIN Tourneys t ON (t.id = tp.tourneyId)""")
            rebuild_sql_tourney = rebuild_sql_tourney.replace('<tourney_group_clause>', ",t.tourneyTypeId")
            rebuild_sql_tourney = rebuild_sql_tourney.replace('<where_clause>', where)
            #print "rebuild_sql_tourney:",rebuild_sql_tourney

            self.get_cursor().execute(rebuild_sql_tourney)
            self.commit()
            print _("Rebuild hudcache took %.1f seconds") % (time() - stime,)
        except:
            err = traceback.extract_tb(sys.exc_info()[2])[-1]
            print _("Error rebuilding hudcache:"), str(sys.exc_value)
    #end def rebuild_hudcache
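    # Minimal usage sketch: both arguments are plain 'YYYY-MM-DD' strings in the
    # same format as hero_hudstart_def / villain_hudstart_def above:
    #   db.rebuild_hudcache(h_start='2010-01-01', v_start='2010-06-01')
    # Hands starting before the relevant date are left out of the rebuilt cache.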
    def rebuild_sessionscache(self):
        """clears sessionscache and rebuilds from the individual records"""

        heros = []
        for site in self.config.get_supported_sites():
            result = self.get_site_id(site)
            if result:
                site_id = result[0][0]
                hero = self.config.supported_sites[site].screen_name
                p_id = self.get_player_id(self.config, site, hero)
                if p_id:
                    heros.append(int(p_id))

        rebuildSessionsCache    = self.sql.query['rebuildSessionsCache']
        rebuildSessionsCacheSum = self.sql.query['rebuildSessionsCacheSum']

        if len(heros) == 0:
            where         = '0'
            where_summary = '0'
        elif len(heros) > 0:
            where         = str(heros[0])
            where_summary = str(heros[0])
            for i in heros:
                if i != heros[0]:
                    where         = where + ' OR HandsPlayers.playerId = %s' % str(i)
                    where_summary = where_summary + ' OR TourneysPlayers.playerId = %s' % str(i)
        rebuildSessionsCache    = rebuildSessionsCache.replace('<where_clause>', where)
        rebuildSessionsCacheSum = rebuildSessionsCacheSum.replace('<where_clause>', where_summary)

        c = self.get_cursor()
        c.execute(self.sql.query['clearSessionsCache'])
        self.commit()

        sc, gsc = {'bk': []}, {'bk': []}
        c.execute(rebuildSessionsCache)
        tmp = c.fetchone()
        while True:
            pids, game, pdata = {}, {}, {}
            id                              = tmp[0]
            startTime                       = tmp[1]
            pids['pname']                   = tmp[2]
            gid                             = tmp[3]
            game['type']                    = tmp[4]
            pdata['pname']                  = {}
            pdata['pname']['totalProfit']   = tmp[5]
            pdata['pname']['tourneyTypeId'] = tmp[6]
            pdata['pname']['street0VPI']    = tmp[7]
            pdata['pname']['street1Seen']   = tmp[8]
            tmp = c.fetchone()
            sc  = self.prepSessionsCache (id, pids, startTime, sc, heros, tmp == None)
            gsc = self.storeSessionsCache(id, pids, startTime, game, gid, pdata, sc, gsc, None, heros, tmp == None)
            if tmp == None:
                for i, id in sc.iteritems():
                    if i != 'bk':
                        sid = id['id']
                        gid = gsc[i]['id']
                        c.execute("UPDATE Hands SET sessionId = %s, gameSessionId = %s WHERE id = %s", (sid, gid, i))
                break
        self.commit()

        sc, gsc = {'bk': []}, {'bk': []}
        c.execute(rebuildSessionsCacheSum)
        tmp = c.fetchone()
        while True:
            pids, game, info = {}, {}, {}
            id                                = tmp[0]
            startTime                         = tmp[1]
            pids['pname']                     = tmp[2]
            game['type']                      = 'summary'
            info['tourneyTypeId']             = tmp[3]
            info['winnings']                  = {}
            info['winnings']['pname']         = tmp[4]
            info['winningsCurrency']          = {}
            info['winningsCurrency']['pname'] = tmp[5]
            info['buyinCurrency']             = tmp[6]
            info['buyin']                     = tmp[7]
            info['fee']                       = tmp[8]
            tmp = c.fetchone()
            sc  = self.prepSessionsCache (id, pids, startTime, sc, heros, tmp == None)
            gsc = self.storeSessionsCache(id, pids, startTime, game, None, info, sc, gsc, None, heros, tmp == None)
            if tmp == None:
                break
        self.commit()
    #end def rebuild_sessionscache
    def get_hero_hudcache_start(self):
        """fetches earliest stylekey from hudcache for one of hero's player ids"""
        try:
            # derive list of program owner's player ids
            self.hero = {}                               # name of program owner indexed by site id
            self.hero_ids = {'dummy':-53, 'dummy2':-52}  # playerid of owner indexed by site id
                                                         # keep at least two values in the dict so that
                                                         # the tuple generation below doesn't produce a
                                                         # () or (1,) style tuple
            for site in self.config.get_supported_sites():
                result = self.get_site_id(site)
                if result:
                    site_id = result[0][0]
                    self.hero[site_id] = self.config.supported_sites[site].screen_name
                    p_id = self.get_player_id(self.config, site, self.hero[site_id])
                    if p_id:
                        self.hero_ids[site_id] = int(p_id)

            q = self.sql.query['get_hero_hudcache_start'].replace("<playerid_list>", str(tuple(self.hero_ids.values())))
            c = self.get_cursor()
            c.execute(q)
            tmp = c.fetchone()
            if tmp == (None,):
                return self.hero_hudstart_def
            else:
                # styleKey is 'dYYMMDD' - expand it to a 'YYYY-MM-DD' date string
                return "20"+tmp[0][1:3] + "-" + tmp[0][3:5] + "-" + tmp[0][5:7]
        except:
            err = traceback.extract_tb(sys.exc_info()[2])[-1]
            print _("Error fetching hero hudcache start:"), str(sys.exc_value)
    #end def get_hero_hudcache_start
    def analyzeDB(self):
        """Do whatever the DB can offer to update index/table statistics"""
        stime = time()
        if self.backend == self.MYSQL_INNODB:
            try:
                self.get_cursor().execute(self.sql.query['analyze'])
            except:
                print _("Error during analyze:"), str(sys.exc_value)
        elif self.backend == self.PGSQL:
            self.connection.set_isolation_level(0)   # allow analyze to work
            try:
                self.get_cursor().execute(self.sql.query['analyze'])
            except:
                print _("Error during analyze:"), str(sys.exc_value)
            self.connection.set_isolation_level(1)   # go back to normal isolation level
        self.commit()
        atime = time() - stime
        log.info(_("Analyze took %.1f seconds") % (atime,))
    #end def analyzeDB
1793 """Do whatever the DB can offer to update index/table statistics"""
1795 if self
.backend
== self
.MYSQL_INNODB
:
1797 self
.get_cursor().execute(self
.sql
.query
['vacuum'])
1799 print _("Error during vacuum:"), str(sys
.exc_value
)
1800 elif self
.backend
== self
.PGSQL
:
1801 self
.connection
.set_isolation_level(0) # allow vacuum to work
1803 self
.get_cursor().execute(self
.sql
.query
['vacuum'])
1805 print _("Error during vacuum:"), str(sys
.exc_value
)
1806 self
.connection
.set_isolation_level(1) # go back to normal isolation level
1808 atime
= time() - stime
1809 print _("Vacuum took %.1f seconds") % (atime
,)
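    # analyzeDB and vacuumDB share one shape: on PostgreSQL they first drop to
    # isolation level 0 because ANALYZE/VACUUM cannot run inside a transaction
    # block there, run the backend-specific statement from SQL.py, then restore
    # level 1. A typical post-import sequence (a sketch):
    #   db.analyzeDB()   # refresh planner statistics
    #   db.vacuumDB()    # reclaim dead rows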
    # Start of Hand Writing routines. Idea is to provide a mixture of routines to store Hand data
    # however the calling prog requires. Main aims:
    # - existing static routines from fpdb_simple just modified

    def setThreadId(self, threadid):
        self.threadId = threadid

    def acquireLock(self, wait=True, retry_time=.01):
        while not self._has_lock:
            cursor = self.get_cursor()
            cursor.execute(self.sql.query['selectLock'])
            record = cursor.fetchall()
            self.commit()
            if not len(record):
                cursor.execute(self.sql.query['switchLock'], (True, self.threadId))
                self.commit()
                self._has_lock = True
                return True
            else:
                cursor.execute(self.sql.query['missedLock'], (1, self.threadId))
                self.commit()
                if not wait:
                    return False
                sleep(retry_time)

    def releaseLock(self):
        if self._has_lock:
            cursor = self.get_cursor()
            num = cursor.execute(self.sql.query['switchLock'], (False, self.threadId))
            self.commit()
            self._has_lock = False

    def lock_for_insert(self):
        """Lock tables in MySQL to try to speed inserts up"""
        try:
            self.get_cursor().execute(self.sql.query['lockForInsert'])
        except:
            print _("Error during lock_for_insert:"), str(sys.exc_value)
    #end def lock_for_insert
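    # The lock helpers are meant to bracket multi-statement writes; a sketch of
    # the intended call pattern (the thread id is whatever the caller chooses):
    #   db.setThreadId(1)
    #   if db.acquireLock(wait=True):
    #       try:
    #           pass    # ... do inserts ...
    #       finally:
    #           db.releaseLock()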
    ###########################
    # NEWIMPORT CODE
    ###########################

    def storeHand(self, hdata, hbulk, doinsert = False, printdata = False):
        if printdata:
            print _("######## Hands ##########")
            pp = pprint.PrettyPrinter(indent=4)
            pp.pprint(hdata)
            print _("###### End Hands ########")

        # Tablename can have odd characters
        hdata['tableName'] = Charset.to_db_utf8(hdata['tableName'])

        hbulk.append( [ hdata['tableName'],
                        hdata['siteHandNo'],
                        hdata['tourneyId'],
                        hdata['gametypeId'],
                        hdata['sessionId'],
                        hdata['gameSessionId'],
                        hdata['fileId'],
                        hdata['startTime'],
                        datetime.utcnow(), #importtime
                        hdata['seats'],
                        hdata['texture'],
                        hdata['playersVpi'],
                        hdata['boardcard1'],
                        hdata['boardcard2'],
                        hdata['boardcard3'],
                        hdata['boardcard4'],
                        hdata['boardcard5'],
                        hdata['runItTwice'],
                        hdata['playersAtStreet1'],
                        hdata['playersAtStreet2'],
                        hdata['playersAtStreet3'],
                        hdata['playersAtStreet4'],
                        hdata['playersAtShowdown'],
                        hdata['street0Raises'],
                        hdata['street1Raises'],
                        hdata['street2Raises'],
                        hdata['street3Raises'],
                        hdata['street4Raises'],
                        hdata['street1Pot'],
                        hdata['street2Pot'],
                        hdata['street3Pot'],
                        hdata['street4Pot'],
                        hdata['showdownPot'],
                        hdata['id']
                      ] )

        if doinsert:
            bbulk = []
            for h in hbulk:
                id = h.pop()
                if hdata['sc'] and hdata['gsc']:
                    h[4] = hdata['sc'][id]['id']
                    h[5] = hdata['gsc'][id]['id']
                for b in hdata['boards']:
                    bbulk += [[id] + b]
            q = self.sql.query['store_hand']
            q = q.replace('%s', self.sql.query['placeholder'])
            c = self.get_cursor()
            c.executemany(q, hbulk)
            q = self.sql.query['store_boards']
            q = q.replace('%s', self.sql.query['placeholder'])
            c = self.get_cursor()
            c.executemany(q, bbulk)
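    # storeHand and the other store* methods below share a batching pattern:
    # callers append one row per hand and only the final call flushes everything
    # with a single executemany(). A sketch (parsed_hands is hypothetical):
    #   hbulk = []
    #   for hdata in parsed_hands[:-1]:
    #       db.storeHand(hdata, hbulk)                        # accumulate only
    #   db.storeHand(parsed_hands[-1], hbulk, doinsert=True)  # one bulk insert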
    def storeHandsPlayers(self, hid, pids, pdata, hpbulk, doinsert = False, printdata = False):
        #print "DEBUG: %s %s %s" %(hid, pids, pdata)
        if printdata:
            pp = pprint.PrettyPrinter(indent=4)
            pp.pprint(pdata)

        for p in pdata:
            hpbulk.append( ( hid,
                             pids[p],
                             pdata[p]['startCash'],
                             pdata[p]['seatNo'],
                             pdata[p]['sitout'],
                             pdata[p]['card1'],
                             pdata[p]['card2'],
                             pdata[p]['card3'],
                             pdata[p]['card4'],
                             pdata[p]['card5'],
                             pdata[p]['card6'],
                             pdata[p]['card7'],
                             pdata[p]['card8'],
                             pdata[p]['card9'],
                             pdata[p]['card10'],
                             pdata[p]['card11'],
                             pdata[p]['card12'],
                             pdata[p]['card13'],
                             pdata[p]['card14'],
                             pdata[p]['card15'],
                             pdata[p]['card16'],
                             pdata[p]['card17'],
                             pdata[p]['card18'],
                             pdata[p]['card19'],
                             pdata[p]['card20'],
                             pdata[p]['winnings'],
                             pdata[p]['rake'],
                             pdata[p]['totalProfit'],
                             pdata[p]['street0VPI'],
                             pdata[p]['street1Seen'],
                             pdata[p]['street2Seen'],
                             pdata[p]['street3Seen'],
                             pdata[p]['street4Seen'],
                             pdata[p]['sawShowdown'],
                             pdata[p]['wonAtSD'],
                             pdata[p]['street0Aggr'],
                             pdata[p]['street1Aggr'],
                             pdata[p]['street2Aggr'],
                             pdata[p]['street3Aggr'],
                             pdata[p]['street4Aggr'],
                             pdata[p]['street1CBChance'],
                             pdata[p]['street2CBChance'],
                             pdata[p]['street3CBChance'],
                             pdata[p]['street4CBChance'],
                             pdata[p]['street1CBDone'],
                             pdata[p]['street2CBDone'],
                             pdata[p]['street3CBDone'],
                             pdata[p]['street4CBDone'],
                             pdata[p]['wonWhenSeenStreet1'],
                             pdata[p]['wonWhenSeenStreet2'],
                             pdata[p]['wonWhenSeenStreet3'],
                             pdata[p]['wonWhenSeenStreet4'],
                             pdata[p]['street0Calls'],
                             pdata[p]['street1Calls'],
                             pdata[p]['street2Calls'],
                             pdata[p]['street3Calls'],
                             pdata[p]['street4Calls'],
                             pdata[p]['street0Bets'],
                             pdata[p]['street1Bets'],
                             pdata[p]['street2Bets'],
                             pdata[p]['street3Bets'],
                             pdata[p]['street4Bets'],
                             pdata[p]['position'],
                             pdata[p]['tourneysPlayersIds'],
                             pdata[p]['startCards'],
                             pdata[p]['street0_3BChance'],
                             pdata[p]['street0_3BDone'],
                             pdata[p]['street0_4BChance'],
                             pdata[p]['street0_4BDone'],
                             pdata[p]['street0_C4BChance'],
                             pdata[p]['street0_C4BDone'],
                             pdata[p]['street0_FoldTo3BChance'],
                             pdata[p]['street0_FoldTo3BDone'],
                             pdata[p]['street0_FoldTo4BChance'],
                             pdata[p]['street0_FoldTo4BDone'],
                             pdata[p]['street0_SqueezeChance'],
                             pdata[p]['street0_SqueezeDone'],
                             pdata[p]['raiseToStealChance'],
                             pdata[p]['raiseToStealDone'],
                             pdata[p]['success_Steal'],
                             pdata[p]['otherRaisedStreet0'],
                             pdata[p]['otherRaisedStreet1'],
                             pdata[p]['otherRaisedStreet2'],
                             pdata[p]['otherRaisedStreet3'],
                             pdata[p]['otherRaisedStreet4'],
                             pdata[p]['foldToOtherRaisedStreet0'],
                             pdata[p]['foldToOtherRaisedStreet1'],
                             pdata[p]['foldToOtherRaisedStreet2'],
                             pdata[p]['foldToOtherRaisedStreet3'],
                             pdata[p]['foldToOtherRaisedStreet4'],
                             pdata[p]['raiseFirstInChance'],
                             pdata[p]['raisedFirstIn'],
                             pdata[p]['foldBbToStealChance'],
                             pdata[p]['foldedBbToSteal'],
                             pdata[p]['foldSbToStealChance'],
                             pdata[p]['foldedSbToSteal'],
                             pdata[p]['foldToStreet1CBChance'],
                             pdata[p]['foldToStreet1CBDone'],
                             pdata[p]['foldToStreet2CBChance'],
                             pdata[p]['foldToStreet2CBDone'],
                             pdata[p]['foldToStreet3CBChance'],
                             pdata[p]['foldToStreet3CBDone'],
                             pdata[p]['foldToStreet4CBChance'],
                             pdata[p]['foldToStreet4CBDone'],
                             pdata[p]['street1CheckCallRaiseChance'],
                             pdata[p]['street1CheckCallRaiseDone'],
                             pdata[p]['street2CheckCallRaiseChance'],
                             pdata[p]['street2CheckCallRaiseDone'],
                             pdata[p]['street3CheckCallRaiseChance'],
                             pdata[p]['street3CheckCallRaiseDone'],
                             pdata[p]['street4CheckCallRaiseChance'],
                             pdata[p]['street4CheckCallRaiseDone'],
                             pdata[p]['street0Raises'],
                             pdata[p]['street1Raises'],
                             pdata[p]['street2Raises'],
                             pdata[p]['street3Raises'],
                             pdata[p]['street4Raises']
                           ) )

        if doinsert:
            q = self.sql.query['store_hands_players']
            q = q.replace('%s', self.sql.query['placeholder'])
            c = self.get_cursor()
            c.executemany(q, hpbulk)
    def storeHandsActions(self, hid, pids, adata, habulk, doinsert = False, printdata = False):
        #print "DEBUG: %s %s %s" %(hid, pids, adata)

        # This can be used to generate test data. Currently unused
        #if printdata:
        #    pp = pprint.PrettyPrinter(indent=4)
        #    pp.pprint(adata)

        for a in adata:
            habulk.append( (hid,
                            pids[adata[a]['player']],
                            adata[a]['street'],
                            adata[a]['actionNo'],
                            adata[a]['streetActionNo'],
                            adata[a]['actionId'],
                            adata[a]['amount'],
                            adata[a]['raiseTo'],
                            adata[a]['amountCalled'],
                            adata[a]['numDiscarded'],
                            adata[a]['cardsDiscarded'],
                            adata[a]['allIn']
                           ) )

        if doinsert:
            q = self.sql.query['store_hands_actions']
            q = q.replace('%s', self.sql.query['placeholder'])
            c = self.get_cursor()
            c.executemany(q, habulk)
    def storeHudCache(self, gid, pids, starttime, pdata, hcbulk, doinsert = False):
        """Update cached statistics. If update fails because no record exists, do an insert."""

        tz = datetime.utcnow() - datetime.today()
        tz_offset = tz.seconds/3600
        tz_day_start_offset = self.day_start + tz_offset

        d = timedelta(hours=tz_day_start_offset)
        starttime_offset = starttime - d

        if self.use_date_in_hudcache:
            styleKey = datetime.strftime(starttime_offset, 'd%y%m%d')
            #styleKey = "d%02d%02d%02d" % (hand_start_time.year-2000, hand_start_time.month, hand_start_time.day)
        else:
            # hard-code styleKey as 'A000000' (all-time cache, no key) for now
            styleKey = 'A000000'

        update_hudcache = self.sql.query['update_hudcache']
        update_hudcache = update_hudcache.replace('%s', self.sql.query['placeholder'])
        insert_hudcache = self.sql.query['insert_hudcache']
        insert_hudcache = insert_hudcache.replace('%s', self.sql.query['placeholder'])

        #print "DEBUG: %s %s %s" %(hid, pids, pdata)
        for p in pdata:
            line = []
            #NOTE: Insert new stats at the right place because SQL needs strict order
            line.append(1)  # HDs
            line.append(pdata[p]['street0VPI'])
            line.append(pdata[p]['street0Aggr'])
            line.append(pdata[p]['street0_3BChance'])
            line.append(pdata[p]['street0_3BDone'])
            line.append(pdata[p]['street0_4BChance'])
            line.append(pdata[p]['street0_4BDone'])
            line.append(pdata[p]['street0_C4BChance'])
            line.append(pdata[p]['street0_C4BDone'])
            line.append(pdata[p]['street0_FoldTo3BChance'])
            line.append(pdata[p]['street0_FoldTo3BDone'])
            line.append(pdata[p]['street0_FoldTo4BChance'])
            line.append(pdata[p]['street0_FoldTo4BDone'])
            line.append(pdata[p]['street0_SqueezeChance'])
            line.append(pdata[p]['street0_SqueezeDone'])
            line.append(pdata[p]['raiseToStealChance'])
            line.append(pdata[p]['raiseToStealDone'])
            line.append(pdata[p]['success_Steal'])
            line.append(pdata[p]['street1Seen'])
            line.append(pdata[p]['street2Seen'])
            line.append(pdata[p]['street3Seen'])
            line.append(pdata[p]['street4Seen'])
            line.append(pdata[p]['sawShowdown'])
            line.append(pdata[p]['street1Aggr'])
            line.append(pdata[p]['street2Aggr'])
            line.append(pdata[p]['street3Aggr'])
            line.append(pdata[p]['street4Aggr'])
            line.append(pdata[p]['otherRaisedStreet0'])
            line.append(pdata[p]['otherRaisedStreet1'])
            line.append(pdata[p]['otherRaisedStreet2'])
            line.append(pdata[p]['otherRaisedStreet3'])
            line.append(pdata[p]['otherRaisedStreet4'])
            line.append(pdata[p]['foldToOtherRaisedStreet0'])
            line.append(pdata[p]['foldToOtherRaisedStreet1'])
            line.append(pdata[p]['foldToOtherRaisedStreet2'])
            line.append(pdata[p]['foldToOtherRaisedStreet3'])
            line.append(pdata[p]['foldToOtherRaisedStreet4'])
            line.append(pdata[p]['wonWhenSeenStreet1'])
            line.append(pdata[p]['wonWhenSeenStreet2'])
            line.append(pdata[p]['wonWhenSeenStreet3'])
            line.append(pdata[p]['wonWhenSeenStreet4'])
            line.append(pdata[p]['wonAtSD'])
            line.append(pdata[p]['raiseFirstInChance'])
            line.append(pdata[p]['raisedFirstIn'])
            line.append(pdata[p]['foldBbToStealChance'])
            line.append(pdata[p]['foldedBbToSteal'])
            line.append(pdata[p]['foldSbToStealChance'])
            line.append(pdata[p]['foldedSbToSteal'])
            line.append(pdata[p]['street1CBChance'])
            line.append(pdata[p]['street1CBDone'])
            line.append(pdata[p]['street2CBChance'])
            line.append(pdata[p]['street2CBDone'])
            line.append(pdata[p]['street3CBChance'])
            line.append(pdata[p]['street3CBDone'])
            line.append(pdata[p]['street4CBChance'])
            line.append(pdata[p]['street4CBDone'])
            line.append(pdata[p]['foldToStreet1CBChance'])
            line.append(pdata[p]['foldToStreet1CBDone'])
            line.append(pdata[p]['foldToStreet2CBChance'])
            line.append(pdata[p]['foldToStreet2CBDone'])
            line.append(pdata[p]['foldToStreet3CBChance'])
            line.append(pdata[p]['foldToStreet3CBDone'])
            line.append(pdata[p]['foldToStreet4CBChance'])
            line.append(pdata[p]['foldToStreet4CBDone'])
            line.append(pdata[p]['totalProfit'])
            line.append(pdata[p]['street1CheckCallRaiseChance'])
            line.append(pdata[p]['street1CheckCallRaiseDone'])
            line.append(pdata[p]['street2CheckCallRaiseChance'])
            line.append(pdata[p]['street2CheckCallRaiseDone'])
            line.append(pdata[p]['street3CheckCallRaiseChance'])
            line.append(pdata[p]['street3CheckCallRaiseDone'])
            line.append(pdata[p]['street4CheckCallRaiseChance'])
            line.append(pdata[p]['street4CheckCallRaiseDone'])
            line.append(pdata[p]['street0Calls'])
            line.append(pdata[p]['street1Calls'])
            line.append(pdata[p]['street2Calls'])
            line.append(pdata[p]['street3Calls'])
            line.append(pdata[p]['street4Calls'])
            line.append(pdata[p]['street0Bets'])
            line.append(pdata[p]['street1Bets'])
            line.append(pdata[p]['street2Bets'])
            line.append(pdata[p]['street3Bets'])
            line.append(pdata[p]['street4Bets'])
            line.append(pdata[p]['street0Raises'])
            line.append(pdata[p]['street1Raises'])
            line.append(pdata[p]['street2Raises'])
            line.append(pdata[p]['street3Raises'])
            line.append(pdata[p]['street4Raises'])

            hc = {}
            hc['gametypeId'] = gid
            hc['playerId'] = pids[p]
            hc['activeSeats'] = len(pids)
            pos = {'B':'B', 'S':'S', 0:'D', 1:'C', 2:'M', 3:'M', 4:'M', 5:'E', 6:'E', 7:'E', 8:'E', 9:'E' }
            hc['position'] = pos[pdata[p]['position']]
            hc['tourneyTypeId'] = pdata[p]['tourneyTypeId']
            hc['styleKey'] = styleKey
            for i in range(len(line)):
                if line[i]==True:  line[i] = 1
                if line[i]==False: line[i] = 0
            hc['line'] = line
            hc['game'] = [hc['gametypeId']
                         ,hc['playerId']
                         ,hc['activeSeats']
                         ,hc['position']
                         ,hc['tourneyTypeId']
                         ,hc['styleKey']]

            match = False
            for b in hcbulk:
                #print hc['game']==b['game'], hc['game'], b['game']
                if hc['game']==b['game']:
                    b['line'] = [sum(l) for l in zip(b['line'], hc['line'])]
                    match = True
            if not match: hcbulk.append(hc)

        if doinsert:
            inserts = []
            c = self.get_cursor()
            for hc in hcbulk:
                row = hc['line'] + hc['game']
                num = c.execute(update_hudcache, row)
                # Try to do the update first; do an insert if the update matched no row
                if ((self.backend == self.PGSQL and c.statusmessage != "UPDATE 1")
                        or (self.backend == self.MYSQL_INNODB and num == 0)
                        or (self.backend == self.SQLITE and num.rowcount == 0)):
                    inserts.append(hc['game'] + hc['line'])
                    #row = hc['game'] + hc['line']
                    #num = c.execute(insert_hudcache, row)
                    #print "DEBUG: Successfully(?: %s) updated HudCache using INSERT" % num
                #else:
                #    print "DEBUG: Successfully updated HudCache using UPDATE"
            if inserts:
                c.executemany(insert_hudcache, inserts)
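    # The UPDATE-then-INSERT above is a portable upsert: try the UPDATE and only
    # queue an INSERT when the backend reports that no row matched. The generic
    # form of the pattern (a sketch, not fpdb API; no_row_updated is hypothetical):
    #   num = c.execute("UPDATE HudCache SET HDs = HDs + 1 WHERE playerId = %s", (pid,))
    #   if no_row_updated(num, c):
    #       c.execute("INSERT INTO HudCache (playerId, HDs) VALUES (%s, 1)", (pid,))
    # The three-way test in the code exists because each driver reports the row
    # count differently (statusmessage, integer return value, cursor rowcount).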
    def prepSessionsCache(self, hid, pids, startTime, sc, heros, doinsert = False):
        """Update cached sessions. If no record exists, do an insert"""
        THRESHOLD = timedelta(seconds=int(self.sessionTimeout * 60))

        select_prepSC    = self.sql.query['select_prepSC'].replace('%s', self.sql.query['placeholder'])
        update_Hands_sid = self.sql.query['update_Hands_sid'].replace('%s', self.sql.query['placeholder'])
        update_SC_sid    = self.sql.query['update_SC_sid'].replace('%s', self.sql.query['placeholder'])
        update_prepSC    = self.sql.query['update_prepSC'].replace('%s', self.sql.query['placeholder'])

        #print "DEBUG: %s %s %s" %(hid, pids, pdata)
        hand = {}
        for p, id in pids.iteritems():
            if id in heros:
                hand['startTime'] = startTime.replace(tzinfo=None)
                hand['ids'] = []

        if hand:
            id = []
            lower = hand['startTime']-THRESHOLD
            upper = hand['startTime']+THRESHOLD
            for i in range(len(sc['bk'])):
                if ((lower <= sc['bk'][i]['sessionEnd'])
                and (upper >= sc['bk'][i]['sessionStart'])):
                    if ((hand['startTime'] <= sc['bk'][i]['sessionEnd'])
                    and (hand['startTime'] >= sc['bk'][i]['sessionStart'])):
                        id.append(i)
                    elif hand['startTime'] < sc['bk'][i]['sessionStart']:
                        sc['bk'][i]['sessionStart'] = hand['startTime']
                        id.append(i)
                    elif hand['startTime'] > sc['bk'][i]['sessionEnd']:
                        sc['bk'][i]['sessionEnd'] = hand['startTime']
                        id.append(i)
            if len(id) == 1:
                id = id[0]
                sc['bk'][id]['ids'].append(hid)
            elif len(id) == 2:
                if sc['bk'][id[0]]['startTime'] < sc['bk'][id[1]]['startTime']:
                    sc['bk'][id[0]]['endTime'] = sc['bk'][id[1]]['endTime']
                else:
                    sc['bk'][id[0]]['startTime'] = sc['bk'][id[1]]['startTime']
                sc['bk'].pop(id[1])
                id = id[0]
                sc['bk'][id]['ids'].append(hid)
            elif len(id) == 0:
                hand['sessionStart'] = hand['startTime']
                hand['sessionEnd']   = hand['startTime']
                hand['ids'].append(hid)
                sc['bk'].append(hand)

        if doinsert:
            c = self.get_cursor()
            c.execute("SELECT max(sessionId) FROM SessionsCache")
            id = c.fetchone()[0]
            if not id: id = 0
            for i in range(len(sc['bk'])):
                lower = sc['bk'][i]['sessionStart'] - THRESHOLD
                upper = sc['bk'][i]['sessionEnd']   + THRESHOLD
                c.execute(select_prepSC, (lower, upper))
                r = self.fetchallDict(c)
                num = len(r)
                if (num == 1):
                    start, end, update = r[0]['sessionStart'], r[0]['sessionEnd'], False
                    if sc['bk'][i]['sessionStart'] < start:
                        start, update = sc['bk'][i]['sessionStart'], True
                    if sc['bk'][i]['sessionEnd'] > end:
                        end, update = sc['bk'][i]['sessionEnd'], True
                    if update:
                        c.execute(update_prepSC, [start, end, r[0]['id']])
                    for h in sc['bk'][i]['ids']:
                        sc[h] = {'id': r[0]['id'], 'data': [start, end]}
                elif (num > 1):
                    id += 1
                    sid = id
                    start, end, merge = None, None, []
                    r.append(sc['bk'][i])
                    for n in r:
                        if start:
                            if start > n['sessionStart']:
                                start = n['sessionStart']
                        else: start = n['sessionStart']
                        if end:
                            if end < n['sessionEnd']:
                                end = n['sessionEnd']
                        else: end = n['sessionEnd']
                    for n in r:
                        if n.get('id'):
                            if n['id'] in merge: continue
                            merge.append(n['id'])
                            c.execute(update_Hands_sid, (sid, n['id']))
                            c.execute(update_SC_sid, (start, end, sid, n['id']))
                    for k, v in sc.iteritems():
                        if k!='bk' and v['id'] in merge:
                            sc[k]['id'] = sid
                    for h in sc['bk'][i]['ids']:
                        sc[h] = {'id': sid, 'data': [start, end]}
                elif (num == 0):
                    id += 1
                    sid = id
                    start = sc['bk'][i]['sessionStart']
                    end   = sc['bk'][i]['sessionEnd']
                    for h in sc['bk'][i]['ids']:
                        sc[h] = {'id': sid, 'data': [start, end]}
        return sc
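    # Session grouping illustrated: a hand joins an existing session when its
    # start time falls within sessionTimeout minutes of that session's window.
    # E.g. with sessionTimeout = 30, THRESHOLD = timedelta(seconds=30*60), so a
    # hand at 12:29 extends a session that ended at 12:00, while a hand at
    # 12:31 starts a new one; two sessions bridged by one hand are merged.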
    def storeSessionsCache(self, hid, pids, startTime, game, gid, pdata, sc, gsc, tz, heros, doinsert = False):
        """Update cached sessions. If no record exists, do an insert"""
        if not tz:
            tz_dt = datetime.utcnow() - datetime.today()
            tz = tz_dt.seconds/3600

        THRESHOLD = timedelta(seconds=int(self.sessionTimeout * 60))
        local = startTime + timedelta(hours=int(tz))
        date = "d%02d%02d%02d" % (local.year - 2000, local.month, local.day)

        select_SC         = self.sql.query['select_SC'].replace('%s', self.sql.query['placeholder'])
        update_SC         = self.sql.query['update_SC'].replace('%s', self.sql.query['placeholder'])
        insert_SC         = self.sql.query['insert_SC'].replace('%s', self.sql.query['placeholder'])
        delete_SC         = self.sql.query['delete_SC'].replace('%s', self.sql.query['placeholder'])
        update_Hands_gsid = self.sql.query['update_Hands_gsid'].replace('%s', self.sql.query['placeholder'])

        #print "DEBUG: %s %s %s" %(hid, pids, pdata)
        hand = {}
        for p, id in pids.iteritems():
            if id in heros:
                hand['hands'] = 0
                hand['totalProfit'] = 0
                hand['playerId'] = id
                hand['gametypeId'] = None
                hand['date'] = date
                hand['startTime'] = startTime.replace(tzinfo=None)
                hand['hid'] = hid
                hand['tourneys'] = 0
                hand['tourneyTypeId'] = None
                hand['played'] = 0
                hand['ids'] = []
                if (game['type']=='summary'):
                    hand['type'] = 'tour'
                    hand['tourneys'] = 1
                    hand['tourneyTypeId'] = pdata['tourneyTypeId']
                    hand['played'] = 1
                    if pdata['buyinCurrency'] == pdata['winningsCurrency'][p]:
                        hand['totalProfit'] = pdata['winnings'][p] - (pdata['buyin'] + pdata['fee'])
                    else: hand['totalProfit'] = pdata['winnings'][p]
                elif (game['type']=='ring'):
                    hand['type'] = game['type']
                    hand['hands'] = 1
                    hand['gametypeId'] = gid
                    if pdata[p]['street0VPI'] or pdata[p]['street1Seen']:
                        hand['played'] = 1
                    hand['totalProfit'] = pdata[p]['totalProfit']
                elif (game['type']=='tour'):
                    hand['type'] = game['type']
                    hand['hands'] = 1
                    hand['tourneyTypeId'] = pdata[p]['tourneyTypeId']
                    if pdata[p]['street0VPI'] or pdata[p]['street1Seen']:
                        hand['played'] = 1

        if hand:
            id = []
            lower = hand['startTime']-THRESHOLD
            upper = hand['startTime']+THRESHOLD
            for i in range(len(gsc['bk'])):
                if ((hand['date'] == gsc['bk'][i]['date'])
                and (hand['gametypeId'] == gsc['bk'][i]['gametypeId'])
                and (hand['playerId'] == gsc['bk'][i]['playerId'])
                and (hand['tourneyTypeId'] == gsc['bk'][i]['tourneyTypeId'])
                and (hand['played'] == gsc['bk'][i]['played'])):
                    if ((lower <= gsc['bk'][i]['gameEnd'])
                    and (upper >= gsc['bk'][i]['gameStart'])):
                        if ((hand['startTime'] <= gsc['bk'][i]['gameEnd'])
                        and (hand['startTime'] >= gsc['bk'][i]['gameStart'])):
                            gsc['bk'][i]['hands']       += hand['hands']
                            gsc['bk'][i]['tourneys']    += hand['tourneys']
                            gsc['bk'][i]['totalProfit'] += hand['totalProfit']
                        elif hand['startTime'] < gsc['bk'][i]['gameStart']:
                            gsc['bk'][i]['hands']       += hand['hands']
                            gsc['bk'][i]['tourneys']    += hand['tourneys']
                            gsc['bk'][i]['totalProfit'] += hand['totalProfit']
                            gsc['bk'][i]['gameStart']    = hand['startTime']
                        elif hand['startTime'] > gsc['bk'][i]['gameEnd']:
                            gsc['bk'][i]['hands']       += hand['hands']
                            gsc['bk'][i]['tourneys']    += hand['tourneys']
                            gsc['bk'][i]['totalProfit'] += hand['totalProfit']
                            gsc['bk'][i]['gameEnd']      = hand['startTime']
                        id.append(i)
            if len(id) == 1:
                gsc['bk'][id[0]]['ids'].append(hid)
            elif len(id) == 2:
                if gsc['bk'][id[0]]['gameStart'] < gsc['bk'][id[1]]['gameStart']:
                    gsc['bk'][id[0]]['gameEnd'] = gsc['bk'][id[1]]['gameEnd']
                else: gsc['bk'][id[0]]['gameStart'] = gsc['bk'][id[1]]['gameStart']
                gsc['bk'][id[0]]['hands']       += hand['hands']
                gsc['bk'][id[0]]['tourneys']    += hand['tourneys']
                gsc['bk'][id[0]]['totalProfit'] += hand['totalProfit']
                gsc['bk'].pop(id[1])
                gsc['bk'][id[0]]['ids'].append(hid)
            elif len(id) == 0:
                hand['gameStart'] = hand['startTime']
                hand['gameEnd']   = hand['startTime']
                hand['ids'].append(hid)
                gsc['bk'].append(hand)

        if doinsert:
            c = self.get_cursor()
            for i in range(len(gsc['bk'])):
                hid = gsc['bk'][i]['hid']
                sid, start, end = sc[hid]['id'], sc[hid]['data'][0], sc[hid]['data'][1]
                lower = gsc['bk'][i]['gameStart'] - THRESHOLD
                upper = gsc['bk'][i]['gameEnd']   + THRESHOLD
                game = [gsc['bk'][i]['date']
                       ,gsc['bk'][i]['type']
                       ,gsc['bk'][i]['gametypeId']
                       ,gsc['bk'][i]['tourneyTypeId']
                       ,gsc['bk'][i]['playerId']
                       ,gsc['bk'][i]['played']]
                row = [lower, upper] + game
                c.execute(select_SC, row)
                r = self.fetchallDict(c)
                num = len(r)
                if (num == 1):
                    gstart, gend = r[0]['gameStart'], r[0]['gameEnd']
                    if gsc['bk'][i]['gameStart'] < gstart:
                        gstart = gsc['bk'][i]['gameStart']
                    if gsc['bk'][i]['gameEnd'] > gend:
                        gend = gsc['bk'][i]['gameEnd']
                    row = [start, end, gstart, gend
                          ,gsc['bk'][i]['hands']
                          ,gsc['bk'][i]['tourneys']
                          ,gsc['bk'][i]['totalProfit']
                          ,r[0]['id']]
                    c.execute(update_SC, row)
                    for h in gsc['bk'][i]['ids']: gsc[h] = {'id': r[0]['id']}
                elif (num > 1):
                    gstart, gend, hands, tourneys, totalProfit, delete = None, None, 0, 0, 0, []
                    for n in r: delete.append(n['id'])
                    for d in delete: c.execute(delete_SC, d)
                    r.append(gsc['bk'][i])
                    for n in r:
                        if gstart:
                            if gstart > n['gameStart']:
                                gstart = n['gameStart']
                        else: gstart = n['gameStart']
                        if gend:
                            if gend < n['gameEnd']:
                                gend = n['gameEnd']
                        else: gend = n['gameEnd']
                        hands       += n['hands']
                        tourneys    += n['tourneys']
                        totalProfit += n['totalProfit']
                    row = [start, end, gstart, gend, sid] + game + [hands, tourneys, totalProfit]
                    c.execute(insert_SC, row)
                    gsid = self.get_last_insert_id(c)
                    for h in gsc['bk'][i]['ids']: gsc[h] = {'id': gsid}
                    for m in delete:
                        c.execute(update_Hands_gsid, (gsid, m))
                elif (num == 0):
                    gstart      = gsc['bk'][i]['gameStart']
                    gend        = gsc['bk'][i]['gameEnd']
                    hands       = gsc['bk'][i]['hands']
                    tourneys    = gsc['bk'][i]['tourneys']
                    totalProfit = gsc['bk'][i]['totalProfit']
                    row = [start, end, gstart, gend, sid] + game + [hands, tourneys, totalProfit]
                    c.execute(insert_SC, row)
                    gsid = self.get_last_insert_id(c)
                    for h in gsc['bk'][i]['ids']: gsc[h] = {'id': gsid}
                else:
                    # Something bad happened
                    pass
        return gsc
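    # Note the grouping key for game sessions: date, type, gametypeId,
    # tourneyTypeId, playerId and played must all match before a hand merges
    # into an existing entry, and the times must overlap within THRESHOLD;
    # otherwise a new SessionsCache row is inserted.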
    def getSqlGameTypeId(self, siteid, game, printdata = False):
        if(self.gtcache == None):
            self.gtcache = LambdaDict(lambda key:self.insertGameTypes(key[0], key[1]))

        self.gtprintdata = printdata
        hilo = "h"
        if game['category'] in ['studhilo', 'omahahilo']:
            hilo = "s"
        elif game['category'] in ['razz','27_3draw','badugi', '27_1draw']:
            hilo = "l"

        gtinfo = (siteid, game['type'], game['category'], game['limitType'], game['currency'],
                  game['mix'], int(Decimal(game['sb'])*100), int(Decimal(game['bb'])*100),
                  game['maxSeats'], game['ante'])

        gtinsert = (siteid, game['currency'], game['type'], game['base'], game['category'], game['limitType'], hilo,
                    game['mix'], int(Decimal(game['sb'])*100), int(Decimal(game['bb'])*100),
                    int(Decimal(game['bb'])*100), int(Decimal(game['bb'])*200), game['maxSeats'], game['ante'])

        result = self.gtcache[(gtinfo, gtinsert)]
        # NOTE: Using the LambdaDict does the same thing as:
        #if player in self.pcache:
        #    #print "DEBUG: cachehit"
        #    pass
        #else:
        #    self.pcache[player] = self.insertPlayer(player, siteid)
        #result[player] = self.pcache[player]

        return result
    def insertGameTypes(self, gtinfo, gtinsert):
        result = None
        c = self.get_cursor()
        q = self.sql.query['getGametypeNL']
        q = q.replace('%s', self.sql.query['placeholder'])
        c.execute(q, gtinfo)
        tmp = c.fetchone()
        if (tmp == None):
            if self.gtprintdata:
                print _("######## Gametype ##########")
                pp = pprint.PrettyPrinter(indent=4)
                pp.pprint(gtinsert)
                print _("###### End Gametype ########")
            c.execute(self.sql.query['insertGameTypes'], gtinsert)
            result = self.get_last_insert_id(c)
        else:
            result = tmp[0]
        return result
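    # The gtcache LambdaDict (defined at the bottom of this file) memoises
    # insertGameTypes, so a gametype already seen this import never hits the
    # database again. Sketch:
    #   gid1 = db.getSqlGameTypeId(siteid, game)   # first time: SELECT, maybe INSERT
    #   gid2 = db.getSqlGameTypeId(siteid, game)   # same key: cache hit, no SQL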
    def storeFile(self, fdata):
        q = self.sql.query['store_file']
        q = q.replace('%s', self.sql.query['placeholder'])
        c = self.get_cursor()
        c.execute(q, fdata)
        id = self.get_last_insert_id(c)
        return id
    def updateFile(self, fdata):
        q = self.sql.query['update_file']
        q = q.replace('%s', self.sql.query['placeholder'])
        c = self.get_cursor()
        c.execute(q, fdata)
    def getHeroIds(self, pids, sitename):
        #Grab playerIds using hero names in HUD_Config.xml
        try:
            # derive list of program owner's player ids
            hero = {}       # name of program owner indexed by site id
            hero_ids = []
            for site in self.config.get_supported_sites():
                hero = self.config.supported_sites[site].screen_name
                for n, v in pids.iteritems():
                    if n == hero and sitename == site:
                        hero_ids.append(v)
        except:
            err = traceback.extract_tb(sys.exc_info()[2])[-1]
            #print _("Error acquiring hero ids:"), str(sys.exc_value)
        return hero_ids
    def fetchallDict(self, cursor):
        data = cursor.fetchall()
        if not data: return []
        desc = cursor.description
        results = [0]*len(data)
        for i in range(len(data)):
            results[i] = {}
            for n in range(len(desc)):
                name = desc[n][0]
                results[i][name] = data[i][n]
        return results
    def nextHandId(self):
        c = self.get_cursor()
        c.execute("SELECT max(id) FROM Hands")
        id = c.fetchone()[0]
        if not id: id = 0
        id += 1
        return id
    def isDuplicate(self, gametypeID, siteHandNo):
        dup = False
        c = self.get_cursor()
        c.execute(self.sql.query['isAlreadyInDB'], (gametypeID, siteHandNo))
        result = c.fetchall()
        if len(result) > 0:
            dup = True
        return dup

    #################################
    # Finish of NEWIMPORT CODE
    #################################
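    # Import-time duplicate check, sketched (caller-side handling is hypothetical):
    #   if db.isDuplicate(gametypeID, hand.siteHandNo):
    #       continue    # skip hands that are already stored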
    # read HandToWrite objects from q and insert into database
    def insert_queue_hands(self, q, maxwait=10, commitEachHand=True):
        n,fails,maxTries,firstWait = 0,0,4,0.1
        t0 = time()
        while True:
            try:
                h = q.get(True)  # (True,maxWait) has probs if 1st part of import is all dups
            except Queue.Empty:
                # Queue.Empty exception thrown if q was empty for
                # if q.empty() also possible - no point if testing for Queue.Empty exception
                # maybe increment a counter and only break after a few times?
                # could also test threading.active_count() or look through threading.enumerate()
                # so break immediately if no threads, but count up to X exceptions if a writer
                # thread is still alive???
                print _("queue empty too long - writer stopping ...")
                break
            except:
                print _("writer stopping, error reading queue:"), str(sys.exc_info())
                break
            #print "got hand", str(h.get_finished())

            tries,wait,again = 0,firstWait,True
            while again:
                try:
                    again = False # set this immediately to avoid infinite loops!
                    if h.get_finished():
                        # all items on queue processed
                        break
                    self.store_the_hand(h)
                    # optional commit, could be every hand / every N hands / every time a
                    # commit message received?? mark flag to indicate if commits outstanding
                    if commitEachHand:
                        self.commit()
                    n += 1
                except:
                    #print "iqh store error", sys.exc_value # debug
                    self.rollback()
                    if re.search('deadlock', str(sys.exc_info()[1]), re.I):
                        # deadlocks only a problem if hudcache is being updated
                        tries += 1
                        if tries < maxTries and wait < 5:    # wait < 5 just to make sure
                            print _("deadlock detected - trying again ...")
                            sleep(wait)
                            wait = wait + wait
                            again = True
                        else:
                            print _("Too many deadlocks - failed to store hand"), h.get_siteHandNo()
                    if not again:
                        fails += 1
                        err = traceback.extract_tb(sys.exc_info()[2])[-1]
                        print _("***Error storing hand:"), err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
            # finished trying to store hand

            # always reduce q count, whether or not this hand was saved ok
            q.task_done()

        self.commit()
        print _("db writer finished: stored %d hands (%d fails) in %.1f seconds") % (n, fails, time()-t0)
    # end def insert_queue_hands():
    def send_finish_msg(self, q):
        try:
            h = HandToWrite(True)
            q.put(h)
        except:
            err = traceback.extract_tb(sys.exc_info()[2])[-1]
            print _("***Error sending finish:"), err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1])
    # end def send_finish_msg():
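    # The writer pair is used queue-style; a sketch with the standard library
    # Queue module (hands is a hypothetical list of HandToWrite objects):
    #   import Queue
    #   q = Queue.Queue()
    #   for h in hands: q.put(h)
    #   db.send_finish_msg(q)       # sentinel: its get_finished() returns True
    #   db.insert_queue_hands(q)    # drains the queue and stores each hand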
    def getSqlTourneyTypeIDs(self, hand):
        if(self.ttcache == None):
            self.ttcache = LambdaDict(lambda key:self.insertTourneyType(key[0], key[1], key[2], key[3]))

        tourneydata = (hand.siteId, hand.buyinCurrency, hand.buyin, hand.fee, hand.gametype['category'],
                       hand.gametype['limitType'], hand.maxseats, hand.isKO,
                       hand.isRebuy, hand.isAddOn, hand.speed, hand.isShootout, hand.isMatrix)

        tourneyInsert = (hand.siteId, hand.buyinCurrency, hand.buyin, hand.fee, hand.gametype['category'],
                         hand.gametype['limitType'], hand.maxseats,
                         hand.buyInChips, hand.isKO, hand.koBounty, hand.isRebuy,
                         hand.isAddOn, hand.speed, hand.isShootout, hand.isMatrix, hand.added, hand.addedCurrency)

        result = self.ttcache[(hand.tourNo, hand.siteId, tourneydata, tourneyInsert)]
        # NOTE: Using the LambdaDict does the same thing as:
        #if player in self.pcache:
        #    #print "DEBUG: cachehit"
        #    pass
        #else:
        #    self.pcache[player] = self.insertPlayer(player, siteid)
        #result[player] = self.pcache[player]

        return result
    def insertTourneyType(self, tournNo, siteId, tourneydata, tourneyInsert):
        result = None
        c = self.get_cursor()
        q = self.sql.query['getTourneyTypeIdByTourneyNo']
        q = q.replace('%s', self.sql.query['placeholder'])

        #NOTE/FIXME?: MySQL has ON DUPLICATE KEY UPDATE
        #Usage:
        #        INSERT INTO `tags` (`tag`, `count`)
        #         VALUES ($tag, 1)
        #           ON DUPLICATE KEY UPDATE `count`=`count`+1;

        #print "DEBUG: name: %s site: %s" %(name, site_id)

        c.execute (q, (tournNo, siteId))

        tmp = c.fetchone()
        if (tmp == None): #new tourney type
            c.execute (self.sql.query['getTourneyTypeId'].replace('%s', self.sql.query['placeholder']), tourneydata)
            try:
                result = c.fetchone()[0]
            except TypeError: #this means we need to create a new entry
                c.execute (self.sql.query['insertTourneyType'].replace('%s', self.sql.query['placeholder']), tourneyInsert)
                #Get last id might be faster here.
                #c.execute ("SELECT id FROM Players WHERE name=%s", (name,))
                result = self.get_last_insert_id(c)
        else:
            result = tmp[0]
        return result
    def createOrUpdateTourney(self, summary):
        cursor = self.get_cursor()
        q = self.sql.query['getTourneyByTourneyNo'].replace('%s', self.sql.query['placeholder'])
        cursor.execute(q, (summary.siteId, summary.tourNo))

        columnNames = [desc[0] for desc in cursor.description]
        result = cursor.fetchone()

        if result != None:
            expectedValues = ('comment', 'tourneyName', 'matrixIdProcessed', 'totalRebuyCount', 'totalAddOnCount',
                              'prizepool', 'startTime', 'entries', 'commentTs', 'endTime')
            updateDb = False
            resultDict = dict(zip(columnNames, result))

            tourneyId = resultDict["id"]
            for ev in expectedValues:
                if getattr(summary, ev)==None and resultDict[ev]!=None: #DB has this value but object doesn't, so update object
                    setattr(summary, ev, resultDict[ev])
                elif getattr(summary, ev)!=None and resultDict[ev]==None: #object has this value but DB doesn't, so update DB
                    updateDb = True
                #elif ev=="startTime":
                #    if (resultDict[ev] < summary.startTime):
                #        summary.startTime=resultDict[ev]
            if updateDb:
                q = self.sql.query['updateTourney'].replace('%s', self.sql.query['placeholder'])
                row = (summary.entries, summary.prizepool, summary.startTime, summary.endTime, summary.tourneyName,
                       summary.matrixIdProcessed, summary.totalRebuyCount, summary.totalAddOnCount, summary.comment,
                       summary.commentTs, tourneyId
                      )
                cursor.execute(q, row)
        else:
            cursor.execute (self.sql.query['insertTourney'].replace('%s', self.sql.query['placeholder']),
                            (summary.tourneyTypeId, summary.tourNo, summary.entries, summary.prizepool, summary.startTime,
                             summary.endTime, summary.tourneyName, summary.matrixIdProcessed, summary.totalRebuyCount, summary.totalAddOnCount))
            tourneyId = self.get_last_insert_id(cursor)
        return tourneyId
    #end def createOrUpdateTourney
    def getSqlTourneyIDs(self, hand):
        if(self.tcache == None):
            self.tcache = LambdaDict(lambda key:self.insertTourney(key[0], key[1], key[2], key[3]))

        result = self.tcache[(hand.siteId, hand.tourNo, hand.tourneyTypeId, hand.startTime)]
        # NOTE: Using the LambdaDict does the same thing as:
        #if player in self.pcache:
        #    #print "DEBUG: cachehit"
        #    pass
        #else:
        #    self.pcache[player] = self.insertPlayer(player, siteid)
        #result[player] = self.pcache[player]

        return result
    def insertTourney(self, siteId, tourNo, tourneyTypeId, startTime):
        result = None
        c = self.get_cursor()
        q = self.sql.query['getTourneyByTourneyNo']
        q = q.replace('%s', self.sql.query['placeholder'])

        #NOTE/FIXME?: MySQL has ON DUPLICATE KEY UPDATE
        #Usage:
        #        INSERT INTO `tags` (`tag`, `count`)
        #         VALUES ($tag, 1)
        #           ON DUPLICATE KEY UPDATE `count`=`count`+1;

        #print "DEBUG: name: %s site: %s" %(name, site_id)

        c.execute (q, (siteId, tourNo))

        tmp = c.fetchone()
        if (tmp == None): #new tourney
            c.execute (self.sql.query['insertTourney'].replace('%s', self.sql.query['placeholder']),
                       (tourneyTypeId, tourNo, None, None,
                        startTime, None, None, None, None, None))
            #Get last id might be faster here.
            #c.execute ("SELECT id FROM Players WHERE name=%s", (name,))
            result = self.get_last_insert_id(c)
        else:
            result = tmp[0]
        return result
    def createOrUpdateTourneysPlayers(self, summary):
        tourneysPlayersIds = {}
        for player in summary.players:
            playerId = summary.dbid_pids[player]
            cursor = self.get_cursor()
            cursor.execute (self.sql.query['getTourneysPlayersByIds'].replace('%s', self.sql.query['placeholder']),
                            (summary.tourneyId, playerId))
            columnNames = [desc[0] for desc in cursor.description]
            result = cursor.fetchone()

            if result != None:
                expectedValues = ('rank', 'winnings', 'winningsCurrency', 'rebuyCount', 'addOnCount', 'koCount')
                updateDb = False
                resultDict = dict(zip(columnNames, result))
                tourneysPlayersIds[player[1]] = result[0]
                for ev in expectedValues:
                    summaryAttribute = ev
                    if ev!="winnings" and ev!="winningsCurrency":
                        summaryAttribute += "s"
                    if getattr(summary, summaryAttribute)[player]==None and resultDict[ev]!=None: #DB has this value but object doesn't, so update object
                        setattr(summary, summaryAttribute, resultDict[ev][player])
                    elif getattr(summary, summaryAttribute)[player]!=None and resultDict[ev]==None: #object has this value but DB doesn't, so update DB
                        updateDb = True
                if updateDb:
                    q = self.sql.query['updateTourneysPlayer'].replace('%s', self.sql.query['placeholder'])
                    inputs = (summary.ranks[player],
                              summary.winnings[player],
                              summary.winningsCurrency[player],
                              summary.rebuyCounts[player],
                              summary.addOnCounts[player],
                              summary.koCounts[player],
                              tourneysPlayersIds[player[1]]
                             )
                    #pp = pprint.PrettyPrinter(indent=4)
                    #pp.pprint(inputs)
                    cursor.execute(q, inputs)
            else:
                #print "all values: tourneyId",summary.tourneyId, "playerId",playerId, "rank",summary.ranks[player], "winnings",summary.winnings[player], "winCurr",summary.winningsCurrency[player], summary.rebuyCounts[player], summary.addOnCounts[player], summary.koCounts[player]
                if summary.ranks[player]:
                    cursor.execute (self.sql.query['insertTourneysPlayer'].replace('%s', self.sql.query['placeholder']),
                                    (summary.tourneyId, playerId, int(summary.ranks[player]), int(summary.winnings[player]), summary.winningsCurrency[player],
                                     summary.rebuyCounts[player], summary.addOnCounts[player], summary.koCounts[player]))
                else:
                    cursor.execute (self.sql.query['insertTourneysPlayer'].replace('%s', self.sql.query['placeholder']),
                                    (summary.tourneyId, playerId, None, None, None,
                                     summary.rebuyCounts[player], summary.addOnCounts[player], summary.koCounts[player]))
                tourneysPlayersIds[player[1]] = self.get_last_insert_id(cursor)
        return tourneysPlayersIds
    def getSqlTourneysPlayersIDs(self, hand):
        result = {}
        if(self.tpcache == None):
            self.tpcache = LambdaDict(lambda key:self.insertTourneysPlayers(key[0], key[1]))

        for player in hand.players:
            playerId = hand.dbid_pids[player[1]]
            result[player[1]] = self.tpcache[(playerId,hand.tourneyId)]
        # NOTE: Using the LambdaDict does the same thing as:
        #if player in self.pcache:
        #    #print "DEBUG: cachehit"
        #    pass
        #else:
        #    self.pcache[player] = self.insertPlayer(player, siteid)
        #result[player] = self.pcache[player]

        return result
    def insertTourneysPlayers(self, playerId, tourneyId):
        result = None
        c = self.get_cursor()
        q = self.sql.query['getTourneysPlayersByIds']
        q = q.replace('%s', self.sql.query['placeholder'])

        #NOTE/FIXME?: MySQL has ON DUPLICATE KEY UPDATE
        #Usage:
        #        INSERT INTO `tags` (`tag`, `count`)
        #         VALUES ($tag, 1)
        #           ON DUPLICATE KEY UPDATE `count`=`count`+1;

        #print "DEBUG: name: %s site: %s" %(name, site_id)

        c.execute (q, (tourneyId, playerId))

        tmp = c.fetchone()
        if (tmp == None): #new tourneys player
            c.execute (self.sql.query['insertTourneysPlayer'].replace('%s',self.sql.query['placeholder'])
                      ,(tourneyId, playerId, None, None, None, None, None, None))
            #Get last id might be faster here.
            #c.execute ("SELECT id FROM Players WHERE name=%s", (name,))
            result = self.get_last_insert_id(c)
        else:
            result = tmp[0]
        return result
    def getTourneyTypesIds(self):
        c = self.connection.cursor()
        c.execute(self.sql.query['getTourneyTypesIds'])
        result = c.fetchall()
        return result
    #end def getTourneyTypesIds
    def getTourneyInfo(self, siteName, tourneyNo):
        c = self.get_cursor()
        c.execute(self.sql.query['getTourneyInfo'], (siteName, tourneyNo))
        columnNames = c.description

        names = []
        for column in columnNames:
            names.append(column[0])

        data = c.fetchone()
        return (names, data)
    #end def getTourneyInfo
    def getTourneyPlayerInfo(self, siteName, tourneyNo, playerName):
        c = self.get_cursor()
        c.execute(self.sql.query['getTourneyPlayerInfo'], (siteName, tourneyNo, playerName))
        columnNames = c.description

        names = []
        for column in columnNames:
            names.append(column[0])

        data = c.fetchone()
        return (names, data)
    #end def getTourneyPlayerInfo
# Class used to hold all the data needed to write a hand to the db
# mainParser() in fpdb_parse_logic.py creates one of these and then passes it to
# self.insert_queue_hands()

class HandToWrite:

    def __init__(self, finished = False): # db_name and game not used any more
        try:
            self.finished = finished
            self.config = None
            self.settings = None
            self.base = None
            self.category = None
            self.siteTourneyNo = None
            self.buyin = None
            self.fee = None
            self.knockout = None
            self.entries = None
            self.prizepool = None
            self.tourneyStartTime = None
            self.isTourney = None
            self.tourneyTypeId = None
            self.siteID = None
            self.siteHandNo = None
            self.gametypeID = None
            self.handStartTime = None
            self.names = None
            self.playerIDs = None
            self.startCashes = None
            self.positions = None
            self.antes = None
            self.cardValues = None
            self.cardSuits = None
            self.boardValues = None
            self.boardSuits = None
            self.winnings = None
            self.rakes = None
            self.actionTypes = None
            self.allIns = None
            self.actionAmounts = None
            self.actionNos = None
            self.hudImportData = None
            self.maxSeats = None
            self.tableName = None
            self.seatNos = None
        except:
            print _("%s error: %s") % ("HandToWrite.init", str(sys.exc_info()))
            raise
    # end def __init__

    def set_all( self, config, settings, base, category, siteTourneyNo, buyin
               , fee, knockout, entries, prizepool, tourneyStartTime
               , isTourney, tourneyTypeId, siteID, siteHandNo
               , gametypeID, handStartTime, names, playerIDs, startCashes
               , positions, antes, cardValues, cardSuits, boardValues, boardSuits
               , winnings, rakes, actionTypes, allIns, actionAmounts
               , actionNos, hudImportData, maxSeats, tableName, seatNos):

        try:
            self.config = config
            self.settings = settings
            self.base = base
            self.category = category
            self.siteTourneyNo = siteTourneyNo
            self.buyin = buyin
            self.fee = fee
            self.knockout = knockout
            self.entries = entries
            self.prizepool = prizepool
            self.tourneyStartTime = tourneyStartTime
            self.isTourney = isTourney
            self.tourneyTypeId = tourneyTypeId
            self.siteID = siteID
            self.siteHandNo = siteHandNo
            self.gametypeID = gametypeID
            self.handStartTime = handStartTime
            self.names = names
            self.playerIDs = playerIDs
            self.startCashes = startCashes
            self.positions = positions
            self.antes = antes
            self.cardValues = cardValues
            self.cardSuits = cardSuits
            self.boardValues = boardValues
            self.boardSuits = boardSuits
            self.winnings = winnings
            self.rakes = rakes
            self.actionTypes = actionTypes
            self.allIns = allIns
            self.actionAmounts = actionAmounts
            self.actionNos = actionNos
            self.hudImportData = hudImportData
            self.maxSeats = maxSeats
            self.tableName = tableName
            self.seatNos = seatNos
        except:
            print _("%s error: %s") % ("HandToWrite.set_all", str(sys.exc_info()))
            raise
    # end def set_all

    def get_finished(self):
        return( self.finished )
    # end def get_finished

    def get_siteHandNo(self):
        return( self.siteHandNo )
    # end def get_siteHandNo
if __name__=="__main__":
    c = Configuration.Config()
    sql = SQL.Sql(db_server = 'sqlite')

    db_connection = Database(c) # mysql fpdb holdem
#    db_connection = Database(c, 'fpdb-p', 'test') # mysql fpdb holdem
#    db_connection = Database(c, 'PTrackSv2', 'razz') # mysql razz
#    db_connection = Database(c, 'ptracks', 'razz') # postgres
    print "database connection object = ", db_connection.connection
    # db_connection.recreate_tables()
    db_connection.dropAllIndexes()
    db_connection.createAllIndexes()

    h = db_connection.get_last_hand()
    print "last hand = ", h

    hero = db_connection.get_player_id(c, 'PokerStars', 'nutOmatic')
    if hero:
        print "nutOmatic player_id", hero

    # example of displaying query plan in sqlite:
    if db_connection.backend == 4:
        c = db_connection.get_cursor()
        c.execute('explain query plan '+sql.query['get_table_name'], (h, ))
        for row in c.fetchall():
            print "Query plan:", row

    t0 = time()
    stat_dict = db_connection.get_stats_from_hand(h, "ring")
    t1 = time()
    for p in stat_dict.keys():
        print p, "  ", stat_dict[p]

    print _("cards ="), db_connection.get_cards(u'1')
    db_connection.close_connection()

    print _("get_stats took: %4.3f seconds") % (t1-t0)

    print _("Press ENTER to continue.")
    sys.stdin.readline()


#Code borrowed from http://push.cx/2008/caching-dictionaries-in-python-vs-ruby
class LambdaDict(dict):
    def __init__(self, l):
        super(LambdaDict, self).__init__()
        self.l = l

    def __getitem__(self, key):
        if self.has_key(key):
            return self.get(key)
        else:
            self.__setitem__(key, self.l(key))
            return self.get(key)
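# Example (a sketch): values are created lazily on first access and cached.
#   squares = LambdaDict(lambda k: k*k)
#   print squares[4]    # computes 16 via the lambda and stores it
#   print squares[4]    # second lookup comes straight from the dict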