1 | # -*- coding: utf-8 -*- |
---|
2 | # pylint: disable=no-member |
---|
3 | """ |
---|
4 | | This file is part of the web2py Web Framework |
---|
5 | | Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu> |
---|
6 | | License: BSD |
---|
7 | | |
---|
8 | |
---|
9 | This file contains the DAL support for many relational databases, including: |
---|
10 | |
---|
11 | - SQLite & SpatiaLite |
---|
12 | - MySQL |
---|
13 | - Postgres |
---|
14 | - Firebird |
---|
15 | - Oracle |
---|
16 | - MS SQL |
---|
17 | - DB2 |
---|
18 | - Interbase |
---|
19 | - Ingres |
---|
20 | - Informix (9+ and SE) |
---|
21 | - SapDB (experimental) |
---|
22 | - Cubrid (experimental) |
---|
23 | - CouchDB (experimental) |
---|
24 | - MongoDB (in progress) |
---|
25 | - Google:nosql |
---|
26 | - Google:sql |
---|
27 | - Teradata |
---|
28 | - IMAP (experimental) |
---|
29 | |
---|
30 | Example of usage:: |
---|
31 | |
---|
32 | >>> # from dal import DAL, Field |
---|
33 | |
---|
34 | ### create DAL connection (and create DB if it doesn't exist) |
---|
35 | >>> db = DAL(('sqlite://storage.sqlite','mysql://a:b@localhost/x'), |
---|
36 | ... folder=None) |
---|
37 | |
---|
38 | ### define a table 'person' (create/alter as necessary) |
---|
39 | >>> person = db.define_table('person',Field('name','string')) |
---|
40 | |
---|
41 | ### insert a record |
---|
42 | >>> id = person.insert(name='James') |
---|
43 | |
---|
44 | ### retrieve it by id |
---|
45 | >>> james = person(id) |
---|
46 | |
---|
47 | ### retrieve it by name |
---|
48 | >>> james = person(name='James') |
---|
49 | |
---|
50 | ### retrieve it by arbitrary query |
---|
51 | >>> query = (person.name=='James') & (person.name.startswith('J')) |
---|
52 | >>> james = db(query).select(person.ALL)[0] |
---|
53 | |
---|
54 | ### update one record |
---|
55 | >>> james.update_record(name='Jim') |
---|
56 | <Row {'id': 1, 'name': 'Jim'}> |
---|
57 | |
---|
58 | ### update multiple records by query |
---|
59 | >>> db(person.name.like('J%')).update(name='James') |
---|
60 | 1 |
---|
61 | |
---|
62 | ### delete records by query |
---|
63 | >>> db(person.name.lower() == 'jim').delete() |
---|
64 | 0 |
---|
65 | |
---|
66 | ### retrieve multiple records (rows) |
---|
67 | >>> people = db(person).select(orderby=person.name, |
---|
68 | ... groupby=person.name, limitby=(0,100)) |
---|
69 | |
---|
70 | ### further filter them |
---|
71 | >>> james = people.find(lambda row: row.name == 'James').first() |
---|
72 | >>> print james.id, james.name |
---|
73 | 1 James |
---|
74 | |
---|
75 | ### check aggregates |
---|
76 | >>> counter = person.id.count() |
---|
77 | >>> print db(person).select(counter).first()(counter) |
---|
78 | 1 |
---|
79 | |
---|
80 | ### delete one record |
---|
81 | >>> james.delete_record() |
---|
82 | 1 |
---|
83 | |
---|
84 | ### delete (drop) entire database table |
---|
85 | >>> person.drop() |
---|
86 | |
---|
87 | |
---|
88 | Supported DAL URI strings:: |
---|
89 | |
---|
90 | 'sqlite://test.db' |
---|
91 | 'spatialite://test.db' |
---|
92 | 'sqlite:memory' |
---|
93 | 'spatialite:memory' |
---|
94 | 'jdbc:sqlite://test.db' |
---|
95 | 'mysql://root:none@localhost/test' |
---|
96 | 'postgres://mdipierro:password@localhost/test' |
---|
97 | 'postgres:psycopg2://mdipierro:password@localhost/test' |
---|
98 | 'postgres:pg8000://mdipierro:password@localhost/test' |
---|
99 | 'jdbc:postgres://mdipierro:none@localhost/test' |
---|
100 | 'mssql://web2py:none@A64X2/web2py_test' |
---|
101 | 'mssql2://web2py:none@A64X2/web2py_test' # alternate mappings |
---|
102 | 'mssql3://web2py:none@A64X2/web2py_test' # better pagination (requires >= 2005) |
---|
103 | 'mssql4://web2py:none@A64X2/web2py_test' # best pagination (requires >= 2012) |
---|
104 | 'pytds://user:password@server:port/database' # python-tds |
---|
105 | 'oracle://username:password@database' |
---|
106 | 'firebird://user:password@server:3050/database' |
---|
107 | 'db2:ibm_db_dbi://DSN=dsn;UID=user;PWD=pass' |
---|
108 | 'db2:pyodbc://driver=DB2;hostname=host;database=database;uid=user;pwd=password;port=port' |
---|
109 | 'firebird://username:password@hostname/database' |
---|
110 | 'firebird_embedded://username:password@c://path' |
---|
111 | 'informix://user:password@server:3050/database' |
---|
112 | 'informixu://user:password@server:3050/database' # unicode informix |
---|
113 | 'ingres://database' # or use an ODBC connection string, e.g. 'ingres://dsn=dsn_name' |
---|
114 | 'google:datastore' # for google app engine datastore (uses ndb by default) |
---|
115 | 'google:sql' # for google app engine with sql (mysql compatible) |
---|
116 | 'teradata://DSN=dsn;UID=user;PWD=pass; DATABASE=database' # experimental |
---|
117 | 'imap://user:password@server:port' # experimental |
---|
118 | 'mongodb://user:password@server:port/database' # experimental |
---|
119 | |
---|
120 | For more info:: |
---|
121 | |
---|
122 | help(DAL) |
---|
123 | help(Field) |
---|
124 | |
---|
125 | """ |
---|
126 | |
---|
127 | import glob |
---|
128 | import logging |
---|
129 | import socket |
---|
130 | import threading |
---|
131 | import time |
---|
132 | import traceback |
---|
133 | import urllib |
---|
134 | |
---|
135 | from ._compat import ( |
---|
136 | PY2, |
---|
137 | pickle, |
---|
138 | hashlib_md5, |
---|
139 | pjoin, |
---|
140 | copyreg, |
---|
141 | integer_types, |
---|
142 | with_metaclass, |
---|
143 | long, |
---|
144 | unquote, |
---|
145 | iteritems, |
---|
146 | ) |
---|
147 | from ._globals import GLOBAL_LOCKER, THREAD_LOCAL, DEFAULT |
---|
148 | from ._load import OrderedDict |
---|
149 | from .helpers.classes import ( |
---|
150 | Serializable, |
---|
151 | SQLCallableList, |
---|
152 | BasicStorage, |
---|
153 | RecordUpdater, |
---|
154 | RecordDeleter, |
---|
155 | TimingHandler, |
---|
156 | ) |
---|
157 | from .helpers.methods import hide_password, smart_query, auto_validators, auto_represent, uuidstr |
---|
158 | from .helpers.regex import REGEX_PYTHON_KEYWORDS, REGEX_DBNAME |
---|
159 | from .helpers.rest import RestParser |
---|
160 | from .helpers.serializers import serializers |
---|
161 | from .objects import Table, Field, Rows, Row, Set |
---|
162 | from .adapters.base import BaseAdapter, NullAdapter |
---|
163 | from .default_validators import default_validators |
---|
164 | |
---|
165 | TABLE_ARGS = set( |
---|
166 | ( |
---|
167 | "migrate", |
---|
168 | "primarykey", |
---|
169 | "fake_migrate", |
---|
170 | "format", |
---|
171 | "redefine", |
---|
172 | "singular", |
---|
173 | "plural", |
---|
174 | "trigger_name", |
---|
175 | "sequence_name", |
---|
176 | "fields", |
---|
177 | "common_filter", |
---|
178 | "polymodel", |
---|
179 | "table_class", |
---|
180 | "on_define", |
---|
181 | "rname", |
---|
182 | ) |
---|
183 | ) |
---|
184 | |
---|
185 | |
---|
186 | class MetaDAL(type): |
---|
187 | def __call__(cls, *args, **kwargs): |
---|
188 | #: intercept arguments for DAL customisation on call |
---|
189 | intercepts = [ |
---|
190 | "logger", |
---|
191 | "representers", |
---|
192 | "serializers", |
---|
193 | "uuid", |
---|
194 | "validators", |
---|
195 | "validators_method", |
---|
196 | "Table", |
---|
197 | "Row", |
---|
198 | ] |
---|
199 | intercepted = [] |
---|
200 | for name in intercepts: |
---|
201 | val = kwargs.get(name) |
---|
202 | if val: |
---|
203 | intercepted.append((name, val)) |
---|
204 | del kwargs[name] |
---|
205 | for tup in intercepted: |
---|
206 | setattr(cls, tup[0], tup[1]) |
---|
207 | |
---|
208 | obj = super(MetaDAL, cls).__call__(*args, **kwargs) |
---|
209 | return obj |
---|
210 | |
---|
211 | |
---|
212 | class DAL(with_metaclass(MetaDAL, Serializable, BasicStorage)): |
---|
213 | """ |
---|
214 | An instance of this class represents a database connection |
---|
215 | |
---|
216 | Args: |
---|
217 | uri(str): contains information for connecting to a database. |
---|
218 | Defaults to `'sqlite://dummy.db'` |
---|
219 | |
---|
220 | Note: |
---|
221 | experimental: you can specify a dictionary as uri |
---|
222 | parameter i.e. with:: |
---|
223 | |
---|
224 | db = DAL({"uri": "sqlite://storage.sqlite", |
---|
225 | "tables": {...}, ...}) |
---|
226 | |
---|
227 | for an example of dict input you can check the output |
---|
228 | of the scaffolding db model with |
---|
229 | |
---|
230 | db.as_dict() |
---|
231 | |
---|
232 | Note that for compatibility with Python older than |
---|
233 | version 2.6.5 you should cast your dict input keys |
---|
234 | to str due to a syntax limitation on kwarg names. |
---|
235 | for proper DAL dictionary input you can use one of:: |
---|
236 | |
---|
237 | obj = serializers.cast_keys(dict, [encoding="utf-8"]) |
---|
238 | #or else (for parsing json input) |
---|
239 | obj = serializers.loads_json(data, unicode_keys=False) |
---|
240 | |
---|
241 | pool_size: How many open connections to make to the database object. |
---|
242 | folder: where .table files will be created. Automatically set within |
---|
243 | web2py. Use an explicit path when using DAL outside web2py |
---|
244 | db_codec: string encoding of the database (default: 'UTF-8') |
---|
245 | table_hash: database identifier with .tables. If your connection hash |
---|
246 | change you can still using old .tables if they have db_hash |
---|
247 | as prefix |
---|
248 | check_reserved: list of adapters to check tablenames and column names |
---|
249 | against sql/nosql reserved keywords. Defaults to `None` |
---|
250 | |
---|
251 | - 'common' List of sql keywords that are common to all database |
---|
252 | types such as "SELECT, INSERT". (recommended) |
---|
253 | - 'all' Checks against all known SQL keywords |
---|
254 | - '<adaptername>'' Checks against the specific adapters list of |
---|
255 | keywords |
---|
256 | - '<adaptername>_nonreserved' Checks against the specific adapters |
---|
257 | list of nonreserved keywords. (if available) |
---|
258 | |
---|
259 | migrate: sets default migrate behavior for all tables |
---|
260 | fake_migrate: sets default fake_migrate behavior for all tables |
---|
261 | migrate_enabled: If set to False disables ALL migrations |
---|
262 | fake_migrate_all: If set to True fake migrates ALL tables |
---|
263 | attempts: Number of times to attempt connecting |
---|
264 | auto_import: If set to True, tries import automatically table |
---|
265 | definitions from the databases folder (works only for simple models) |
---|
266 | bigint_id: If set, turn on bigint instead of int for id and reference |
---|
267 | fields |
---|
268 | lazy_tables: delays table definition until table access |
---|
269 | after_connection: can a callable that will be executed after the |
---|
270 | connection |
---|
271 | |
---|
272 | Example: |
---|
273 | Use as:: |
---|
274 | |
---|
275 | db = DAL('sqlite://test.db') |
---|
276 | |
---|
277 | or:: |
---|
278 | |
---|
279 | db = DAL(**{"uri": ..., "tables": [...]...}) # experimental |
---|
280 | |
---|
281 | db.define_table('tablename', Field('fieldname1'), |
---|
282 | Field('fieldname2')) |
---|
283 | |
---|
284 | |
---|
285 | """ |
---|
286 | |
---|
287 | serializers = None |
---|
288 | validators = None |
---|
289 | representers = {} |
---|
290 | validators_method = default_validators |
---|
291 | uuid = uuidstr |
---|
292 | logger = logging.getLogger("pyDAL") |
---|
293 | |
---|
294 | Field = Field |
---|
295 | Table = Table |
---|
296 | Rows = Rows |
---|
297 | Row = Row |
---|
298 | |
---|
299 | record_operators = {"update_record": RecordUpdater, "delete_record": RecordDeleter} |
---|
300 | |
---|
301 | execution_handlers = [TimingHandler] |
---|
302 | |
---|
303 | def __new__(cls, uri="sqlite://dummy.db", *args, **kwargs): |
---|
304 | if not hasattr(THREAD_LOCAL, "_pydal_db_instances_"): |
---|
305 | THREAD_LOCAL._pydal_db_instances_ = {} |
---|
306 | if not hasattr(THREAD_LOCAL, "_pydal_db_instances_zombie_"): |
---|
307 | THREAD_LOCAL._pydal_db_instances_zombie_ = {} |
---|
308 | if uri == "<zombie>": |
---|
309 | db_uid = kwargs["db_uid"] # a zombie must have a db_uid! |
---|
310 | if db_uid in THREAD_LOCAL._pydal_db_instances_: |
---|
311 | db_group = THREAD_LOCAL._pydal_db_instances_[db_uid] |
---|
312 | db = db_group[-1] |
---|
313 | elif db_uid in THREAD_LOCAL._pydal_db_instances_zombie_: |
---|
314 | db = THREAD_LOCAL._pydal_db_instances_zombie_[db_uid] |
---|
315 | else: |
---|
316 | db = super(DAL, cls).__new__(cls) |
---|
317 | THREAD_LOCAL._pydal_db_instances_zombie_[db_uid] = db |
---|
318 | else: |
---|
319 | db_uid = kwargs.get("db_uid", hashlib_md5(repr(uri)).hexdigest()) |
---|
320 | if db_uid in THREAD_LOCAL._pydal_db_instances_zombie_: |
---|
321 | db = THREAD_LOCAL._pydal_db_instances_zombie_[db_uid] |
---|
322 | del THREAD_LOCAL._pydal_db_instances_zombie_[db_uid] |
---|
323 | else: |
---|
324 | db = super(DAL, cls).__new__(cls) |
---|
325 | db_group = THREAD_LOCAL._pydal_db_instances_.get(db_uid, []) |
---|
326 | db_group.append(db) |
---|
327 | THREAD_LOCAL._pydal_db_instances_[db_uid] = db_group |
---|
328 | db._db_uid = db_uid |
---|
329 | return db |
---|
330 | |
---|
331 | @staticmethod |
---|
332 | def set_folder(folder): |
---|
333 | # ## this allows gluon to set a folder for this thread |
---|
334 | # ## <<<<<<<<< Should go away as new DAL replaces old sql.py |
---|
335 | BaseAdapter.set_folder(folder) |
---|
336 | |
---|
337 | @staticmethod |
---|
338 | def get_instances(): |
---|
339 | """ |
---|
340 | Returns a dictionary with uri as key with timings and defined tables:: |
---|
341 | |
---|
342 | {'sqlite://storage.sqlite': { |
---|
343 | 'dbstats': [(select auth_user.email from auth_user, 0.02009)], |
---|
344 | 'dbtables': { |
---|
345 | 'defined': ['auth_cas', 'auth_event', 'auth_group', |
---|
346 | 'auth_membership', 'auth_permission', 'auth_user'], |
---|
347 | 'lazy': '[]' |
---|
348 | } |
---|
349 | } |
---|
350 | } |
---|
351 | |
---|
352 | """ |
---|
353 | dbs = getattr(THREAD_LOCAL, "_pydal_db_instances_", {}).items() |
---|
354 | infos = {} |
---|
355 | for db_uid, db_group in dbs: |
---|
356 | for db in db_group: |
---|
357 | if not db._uri: |
---|
358 | continue |
---|
359 | k = hide_password(db._adapter.uri) |
---|
360 | infos[k] = dict( |
---|
361 | dbstats=[(row[0], row[1]) for row in db._timings], |
---|
362 | dbtables={ |
---|
363 | "defined": sorted( |
---|
364 | list(set(db.tables) - set(db._LAZY_TABLES.keys())) |
---|
365 | ), |
---|
366 | "lazy": sorted(db._LAZY_TABLES.keys()), |
---|
367 | }, |
---|
368 | ) |
---|
369 | return infos |
---|
370 | |
---|
371 | @staticmethod |
---|
372 | def distributed_transaction_begin(*instances): |
---|
373 | if not instances: |
---|
374 | return |
---|
375 | thread_key = "%s.%s" % (socket.gethostname(), threading.currentThread()) |
---|
376 | keys = ["%s.%i" % (thread_key, i) for (i, db) in instances] |
---|
377 | instances = enumerate(instances) |
---|
378 | for (i, db) in instances: |
---|
379 | if not db._adapter.support_distributed_transaction(): |
---|
380 | raise SyntaxError( |
---|
381 | "distributed transaction not suported by %s" % db._dbname |
---|
382 | ) |
---|
383 | for (i, db) in instances: |
---|
384 | db._adapter.distributed_transaction_begin(keys[i]) |
---|
385 | |
---|
386 | @staticmethod |
---|
387 | def distributed_transaction_commit(*instances): |
---|
388 | if not instances: |
---|
389 | return |
---|
390 | instances = enumerate(instances) |
---|
391 | thread_key = "%s.%s" % (socket.gethostname(), threading.currentThread()) |
---|
392 | keys = ["%s.%i" % (thread_key, i) for (i, db) in instances] |
---|
393 | for (i, db) in instances: |
---|
394 | if not db._adapter.support_distributed_transaction(): |
---|
395 | raise SyntaxError( |
---|
396 | "distributed transaction not suported by %s" % db._dbanme |
---|
397 | ) |
---|
398 | try: |
---|
399 | for (i, db) in instances: |
---|
400 | db._adapter.prepare(keys[i]) |
---|
401 | except: |
---|
402 | for (i, db) in instances: |
---|
403 | db._adapter.rollback_prepared(keys[i]) |
---|
404 | raise RuntimeError("failure to commit distributed transaction") |
---|
405 | else: |
---|
406 | for (i, db) in instances: |
---|
407 | db._adapter.commit_prepared(keys[i]) |
---|
408 | return |
---|
409 | |
---|
410 | def __init__( |
---|
411 | self, |
---|
412 | uri="sqlite://dummy.db", |
---|
413 | pool_size=0, |
---|
414 | folder=None, |
---|
415 | db_codec="UTF-8", |
---|
416 | check_reserved=None, |
---|
417 | migrate=True, |
---|
418 | fake_migrate=False, |
---|
419 | migrate_enabled=True, |
---|
420 | fake_migrate_all=False, |
---|
421 | decode_credentials=False, |
---|
422 | driver_args=None, |
---|
423 | adapter_args=None, |
---|
424 | attempts=5, |
---|
425 | auto_import=False, |
---|
426 | bigint_id=False, |
---|
427 | debug=False, |
---|
428 | lazy_tables=False, |
---|
429 | db_uid=None, |
---|
430 | after_connection=None, |
---|
431 | tables=None, |
---|
432 | ignore_field_case=True, |
---|
433 | entity_quoting=True, |
---|
434 | table_hash=None, |
---|
435 | ): |
---|
436 | |
---|
437 | if uri == "<zombie>" and db_uid is not None: |
---|
438 | return |
---|
439 | super(DAL, self).__init__() |
---|
440 | |
---|
441 | if not issubclass(self.Rows, Rows): |
---|
442 | raise RuntimeError("`Rows` class must be a subclass of pydal.objects.Rows") |
---|
443 | |
---|
444 | if not issubclass(self.Row, Row): |
---|
445 | raise RuntimeError("`Row` class must be a subclass of pydal.objects.Row") |
---|
446 | |
---|
447 | from .drivers import DRIVERS, is_jdbc |
---|
448 | |
---|
449 | self._drivers_available = DRIVERS |
---|
450 | |
---|
451 | if not decode_credentials: |
---|
452 | credential_decoder = lambda cred: cred |
---|
453 | else: |
---|
454 | credential_decoder = lambda cred: unquote(cred) |
---|
455 | self._folder = folder |
---|
456 | if folder: |
---|
457 | self.set_folder(folder) |
---|
458 | self._uri = uri |
---|
459 | self._pool_size = pool_size |
---|
460 | self._db_codec = db_codec |
---|
461 | self._pending_references = {} |
---|
462 | self._request_tenant = "request_tenant" |
---|
463 | self._common_fields = [] |
---|
464 | self._referee_name = "%(table)s" |
---|
465 | self._bigint_id = bigint_id |
---|
466 | self._debug = debug |
---|
467 | self._migrated = [] |
---|
468 | self._LAZY_TABLES = {} |
---|
469 | self._lazy_tables = lazy_tables |
---|
470 | self._tables = SQLCallableList() |
---|
471 | self._aliased_tables = threading.local() |
---|
472 | self._driver_args = driver_args |
---|
473 | self._adapter_args = adapter_args |
---|
474 | self._check_reserved = check_reserved |
---|
475 | self._decode_credentials = decode_credentials |
---|
476 | self._attempts = attempts |
---|
477 | self._ignore_field_case = ignore_field_case |
---|
478 | |
---|
479 | if not str(attempts).isdigit() or attempts < 0: |
---|
480 | attempts = 5 |
---|
481 | if uri: |
---|
482 | uris = isinstance(uri, (list, tuple)) and uri or [uri] |
---|
483 | connected = False |
---|
484 | for k in range(attempts): |
---|
485 | for uri in uris: |
---|
486 | try: |
---|
487 | from .adapters import adapters |
---|
488 | |
---|
489 | if is_jdbc and not uri.startswith("jdbc:"): |
---|
490 | uri = "jdbc:" + uri |
---|
491 | self._dbname = REGEX_DBNAME.match(uri).group() |
---|
492 | # notice that driver args or {} else driver_args |
---|
493 | # defaults to {} global, not correct |
---|
494 | kwargs = dict( |
---|
495 | db=self, |
---|
496 | uri=uri, |
---|
497 | pool_size=pool_size, |
---|
498 | folder=folder, |
---|
499 | db_codec=db_codec, |
---|
500 | credential_decoder=credential_decoder, |
---|
501 | driver_args=driver_args or {}, |
---|
502 | adapter_args=adapter_args or {}, |
---|
503 | after_connection=after_connection, |
---|
504 | entity_quoting=entity_quoting, |
---|
505 | ) |
---|
506 | adapter = adapters.get_for(self._dbname) |
---|
507 | self._adapter = adapter(**kwargs) |
---|
508 | # self._adapter.ignore_field_case = ignore_field_case |
---|
509 | if bigint_id: |
---|
510 | self._adapter.dialect._force_bigints() |
---|
511 | connected = True |
---|
512 | break |
---|
513 | except SyntaxError: |
---|
514 | raise |
---|
515 | except Exception: |
---|
516 | tb = traceback.format_exc() |
---|
517 | self.logger.debug( |
---|
518 | "DEBUG: connect attempt %i, connection error:\n%s" % (k, tb) |
---|
519 | ) |
---|
520 | if connected: |
---|
521 | break |
---|
522 | else: |
---|
523 | time.sleep(1) |
---|
524 | if not connected: |
---|
525 | raise RuntimeError( |
---|
526 | "Failure to connect, tried %d times:\n%s" % (attempts, tb) |
---|
527 | ) |
---|
528 | else: |
---|
529 | self._adapter = NullAdapter( |
---|
530 | db=self, |
---|
531 | pool_size=0, |
---|
532 | uri="None", |
---|
533 | folder=folder, |
---|
534 | db_codec=db_codec, |
---|
535 | after_connection=after_connection, |
---|
536 | entity_quoting=entity_quoting, |
---|
537 | ) |
---|
538 | migrate = fake_migrate = False |
---|
539 | self.validators_method = None |
---|
540 | self.validators = None |
---|
541 | adapter = self._adapter |
---|
542 | self._uri_hash = table_hash or hashlib_md5(adapter.uri).hexdigest() |
---|
543 | if check_reserved: |
---|
544 | from .contrib.reserved_sql_keywords import ADAPTERS as RSK |
---|
545 | |
---|
546 | self.RSK = RSK |
---|
547 | self._migrate = migrate |
---|
548 | self._fake_migrate = fake_migrate |
---|
549 | self._migrate_enabled = migrate_enabled |
---|
550 | self._fake_migrate_all = fake_migrate_all |
---|
551 | if self.serializers is not None: |
---|
552 | for k, v in self.serializers.items(): |
---|
553 | serializers._custom_[k] = v |
---|
554 | if auto_import or tables: |
---|
555 | self.import_table_definitions(adapter.folder, tables=tables) |
---|
556 | |
---|
557 | @property |
---|
558 | def tables(self): |
---|
559 | return self._tables |
---|
560 | |
---|
561 | @property |
---|
562 | def _timings(self): |
---|
563 | return getattr(THREAD_LOCAL, "_pydal_timings_", []) |
---|
564 | |
---|
565 | @property |
---|
566 | def _lastsql(self): |
---|
567 | return self._timings[-1] if self._timings else None |
---|
568 | |
---|
569 | def import_table_definitions( |
---|
570 | self, path, migrate=False, fake_migrate=False, tables=None |
---|
571 | ): |
---|
572 | if tables: |
---|
573 | for table in tables: |
---|
574 | self.define_table(**table) |
---|
575 | else: |
---|
576 | pattern = pjoin(path, self._uri_hash + "_*.table") |
---|
577 | for filename in glob.glob(pattern): |
---|
578 | tfile = self._adapter.migrator.file_open(filename, "r" if PY2 else "rb") |
---|
579 | try: |
---|
580 | sql_fields = pickle.load(tfile) |
---|
581 | name = filename[len(pattern) - 7 : -6] |
---|
582 | mf = [ |
---|
583 | ( |
---|
584 | value["sortable"], |
---|
585 | Field( |
---|
586 | key, |
---|
587 | type=value["type"], |
---|
588 | length=value.get("length", None), |
---|
589 | notnull=value.get("notnull", False), |
---|
590 | unique=value.get("unique", False), |
---|
591 | ), |
---|
592 | ) |
---|
593 | for key, value in iteritems(sql_fields) |
---|
594 | ] |
---|
595 | mf.sort(key=lambda a: a[0]) |
---|
596 | self.define_table( |
---|
597 | name, |
---|
598 | *[item[1] for item in mf], |
---|
599 | **dict(migrate=migrate, fake_migrate=fake_migrate) |
---|
600 | ) |
---|
601 | finally: |
---|
602 | self._adapter.migrator.file_close(tfile) |
---|
603 | |
---|
604 | def check_reserved_keyword(self, name): |
---|
605 | """ |
---|
606 | Validates `name` against SQL keywords |
---|
607 | Uses self._check_reserved which is a list of operators to use. |
---|
608 | """ |
---|
609 | for backend in self._check_reserved: |
---|
610 | if name.upper() in self.RSK[backend]: |
---|
611 | raise SyntaxError( |
---|
612 | 'invalid table/column name "%s" is a "%s" reserved SQL/NOSQL keyword' |
---|
613 | % (name, backend.upper()) |
---|
614 | ) |
---|
615 | |
---|
616 | def parse_as_rest(self, patterns, args, vars, queries=None, nested_select=True): |
---|
617 | return RestParser(self).parse(patterns, args, vars, queries, nested_select) |
---|
618 | |
---|
619 | def define_table(self, tablename, *fields, **kwargs): |
---|
620 | invalid_kwargs = set(kwargs) - TABLE_ARGS |
---|
621 | if invalid_kwargs: |
---|
622 | raise SyntaxError( |
---|
623 | 'invalid table "%s" attributes: %s' % (tablename, invalid_kwargs) |
---|
624 | ) |
---|
625 | if not fields and "fields" in kwargs: |
---|
626 | fields = kwargs.get("fields", ()) |
---|
627 | if not isinstance(tablename, str): |
---|
628 | if isinstance(tablename, unicode): |
---|
629 | try: |
---|
630 | tablename = str(tablename) |
---|
631 | except UnicodeEncodeError: |
---|
632 | raise SyntaxError("invalid unicode table name") |
---|
633 | else: |
---|
634 | raise SyntaxError("missing table name") |
---|
635 | redefine = kwargs.get("redefine", False) |
---|
636 | if tablename in self.tables: |
---|
637 | if redefine: |
---|
638 | try: |
---|
639 | delattr(self, tablename) |
---|
640 | except: |
---|
641 | pass |
---|
642 | else: |
---|
643 | raise SyntaxError("table already defined: %s" % tablename) |
---|
644 | elif ( |
---|
645 | tablename.startswith("_") |
---|
646 | or tablename in dir(self) |
---|
647 | or REGEX_PYTHON_KEYWORDS.match(tablename) |
---|
648 | ): |
---|
649 | raise SyntaxError("invalid table name: %s" % tablename) |
---|
650 | elif self._check_reserved: |
---|
651 | self.check_reserved_keyword(tablename) |
---|
652 | if self._lazy_tables: |
---|
653 | if tablename not in self._LAZY_TABLES or redefine: |
---|
654 | self._LAZY_TABLES[tablename] = (tablename, fields, kwargs) |
---|
655 | table = None |
---|
656 | else: |
---|
657 | table = self.lazy_define_table(tablename, *fields, **kwargs) |
---|
658 | if tablename not in self.tables: |
---|
659 | self.tables.append(tablename) |
---|
660 | return table |
---|
661 | |
---|
662 | def lazy_define_table(self, tablename, *fields, **kwargs): |
---|
663 | kwargs_get = kwargs.get |
---|
664 | common_fields = self._common_fields |
---|
665 | if common_fields: |
---|
666 | fields = list(fields) + [ |
---|
667 | f if isinstance(f, Table) else f.clone() for f in common_fields |
---|
668 | ] |
---|
669 | |
---|
670 | table_class = kwargs_get("table_class", Table) |
---|
671 | table = table_class(self, tablename, *fields, **kwargs) |
---|
672 | table._actual = True |
---|
673 | self[tablename] = table |
---|
674 | # must follow above line to handle self references |
---|
675 | table._create_references() |
---|
676 | for field in table: |
---|
677 | if field.requires is DEFAULT: |
---|
678 | field.requires = auto_validators(field) |
---|
679 | if field.represent is None: |
---|
680 | field.represent = auto_represent(field) |
---|
681 | |
---|
682 | migrate = self._migrate_enabled and kwargs_get("migrate", self._migrate) |
---|
683 | if ( |
---|
684 | migrate |
---|
685 | and self._uri not in (None, "None") |
---|
686 | or self._adapter.dbengine == "google:datastore" |
---|
687 | ): |
---|
688 | fake_migrate = self._fake_migrate_all or kwargs_get( |
---|
689 | "fake_migrate", self._fake_migrate |
---|
690 | ) |
---|
691 | polymodel = kwargs_get("polymodel", None) |
---|
692 | try: |
---|
693 | GLOBAL_LOCKER.acquire() |
---|
694 | self._adapter.create_table( |
---|
695 | table, |
---|
696 | migrate=migrate, |
---|
697 | fake_migrate=fake_migrate, |
---|
698 | polymodel=polymodel, |
---|
699 | ) |
---|
700 | finally: |
---|
701 | GLOBAL_LOCKER.release() |
---|
702 | else: |
---|
703 | table._dbt = None |
---|
704 | on_define = kwargs_get("on_define", None) |
---|
705 | if on_define: |
---|
706 | on_define(table) |
---|
707 | return table |
---|
708 | |
---|
709 | def as_dict(self, flat=False, sanitize=True): |
---|
710 | db_uid = uri = None |
---|
711 | if not sanitize: |
---|
712 | uri, db_uid = (self._uri, self._db_uid) |
---|
713 | db_as_dict = dict( |
---|
714 | tables=[], |
---|
715 | uri=uri, |
---|
716 | db_uid=db_uid, |
---|
717 | **dict( |
---|
718 | [ |
---|
719 | (k, getattr(self, "_" + k, None)) |
---|
720 | for k in [ |
---|
721 | "pool_size", |
---|
722 | "folder", |
---|
723 | "db_codec", |
---|
724 | "check_reserved", |
---|
725 | "migrate", |
---|
726 | "fake_migrate", |
---|
727 | "migrate_enabled", |
---|
728 | "fake_migrate_all", |
---|
729 | "decode_credentials", |
---|
730 | "driver_args", |
---|
731 | "adapter_args", |
---|
732 | "attempts", |
---|
733 | "bigint_id", |
---|
734 | "debug", |
---|
735 | "lazy_tables", |
---|
736 | ] |
---|
737 | ] |
---|
738 | ) |
---|
739 | ) |
---|
740 | for table in self: |
---|
741 | db_as_dict["tables"].append(table.as_dict(flat=flat, sanitize=sanitize)) |
---|
742 | return db_as_dict |
---|
743 | |
---|
744 | def __contains__(self, tablename): |
---|
745 | try: |
---|
746 | return tablename in self.tables |
---|
747 | except AttributeError: |
---|
748 | # The instance has no .tables attribute yet |
---|
749 | return False |
---|
750 | |
---|
751 | def __iter__(self): |
---|
752 | for tablename in self.tables: |
---|
753 | yield self[tablename] |
---|
754 | |
---|
755 | def __getitem__(self, key): |
---|
756 | return self.__getattr__(str(key)) |
---|
757 | |
---|
758 | def __getattr__(self, key): |
---|
759 | if object.__getattribute__( |
---|
760 | self, "_lazy_tables" |
---|
761 | ) and key in object.__getattribute__(self, "_LAZY_TABLES"): |
---|
762 | tablename, fields, kwargs = self._LAZY_TABLES.pop(key) |
---|
763 | return self.lazy_define_table(tablename, *fields, **kwargs) |
---|
764 | aliased_tables = object.__getattribute__(self, "_aliased_tables") |
---|
765 | aliased = getattr(aliased_tables, key, None) |
---|
766 | if aliased: |
---|
767 | return aliased |
---|
768 | return BasicStorage.__getattribute__(self, key) |
---|
769 | |
---|
770 | def __setattr__(self, key, value): |
---|
771 | if key[:1] != "_" and key in self: |
---|
772 | raise SyntaxError("Object %s exists and cannot be redefined" % key) |
---|
773 | return super(DAL, self).__setattr__(key, value) |
---|
774 | |
---|
775 | def __repr__(self): |
---|
776 | if hasattr(self, "_uri"): |
---|
777 | return '<DAL uri="%s">' % hide_password(self._adapter.uri) |
---|
778 | else: |
---|
779 | return '<DAL db_uid="%s">' % self._db_uid |
---|
780 | |
---|
781 | def smart_query(self, fields, text): |
---|
782 | return Set(self, smart_query(fields, text)) |
---|
783 | |
---|
784 | def __call__(self, query=None, ignore_common_filters=None): |
---|
785 | return self.where(query, ignore_common_filters) |
---|
786 | |
---|
787 | def where(self, query=None, ignore_common_filters=None): |
---|
788 | if isinstance(query, Table): |
---|
789 | query = self._adapter.id_query(query) |
---|
790 | elif isinstance(query, Field): |
---|
791 | query = query != None |
---|
792 | elif isinstance(query, dict): |
---|
793 | icf = query.get("ignore_common_filters") |
---|
794 | if icf: |
---|
795 | ignore_common_filters = icf |
---|
796 | return Set(self, query, ignore_common_filters=ignore_common_filters) |
---|
797 | |
---|
798 | def commit(self): |
---|
799 | self._adapter.commit() |
---|
800 | object.__getattribute__(self, "_aliased_tables").__dict__.clear() |
---|
801 | |
---|
802 | def rollback(self): |
---|
803 | self._adapter.rollback() |
---|
804 | object.__getattribute__(self, "_aliased_tables").__dict__.clear() |
---|
805 | |
---|
806 | def close(self): |
---|
807 | self._adapter.close() |
---|
808 | if self._db_uid in THREAD_LOCAL._pydal_db_instances_: |
---|
809 | db_group = THREAD_LOCAL._pydal_db_instances_[self._db_uid] |
---|
810 | db_group.remove(self) |
---|
811 | if not db_group: |
---|
812 | del THREAD_LOCAL._pydal_db_instances_[self._db_uid] |
---|
813 | self._adapter._clean_tlocals() |
---|
814 | |
---|
815 | def executesql( |
---|
816 | self, |
---|
817 | query, |
---|
818 | placeholders=None, |
---|
819 | as_dict=False, |
---|
820 | fields=None, |
---|
821 | colnames=None, |
---|
822 | as_ordered_dict=False, |
---|
823 | ): |
---|
824 | """ |
---|
825 | Executes an arbitrary query |
---|
826 | |
---|
827 | Args: |
---|
828 | query (str): the query to submit to the backend |
---|
829 | placeholders: is optional and will always be None. |
---|
830 | If using raw SQL with placeholders, placeholders may be |
---|
831 | a sequence of values to be substituted in |
---|
832 | or, (if supported by the DB driver), a dictionary with keys |
---|
833 | matching named placeholders in your SQL. |
---|
834 | as_dict: will always be None when using DAL. |
---|
835 | If using raw SQL can be set to True and the results cursor |
---|
836 | returned by the DB driver will be converted to a sequence of |
---|
837 | dictionaries keyed with the db field names. Results returned |
---|
838 | with as_dict=True are the same as those returned when applying |
---|
839 | .to_list() to a DAL query. If "as_ordered_dict"=True the |
---|
840 | behaviour is the same as when "as_dict"=True with the keys |
---|
841 | (field names) guaranteed to be in the same order as returned |
---|
842 | by the select name executed on the database. |
---|
843 | fields: list of DAL Fields that match the fields returned from the |
---|
844 | DB. The Field objects should be part of one or more Table |
---|
845 | objects defined on the DAL object. The "fields" list can include |
---|
846 | one or more DAL Table objects in addition to or instead of |
---|
847 | including Field objects, or it can be just a single table |
---|
848 | (not in a list). In that case, the Field objects will be |
---|
849 | extracted from the table(s). |
---|
850 | |
---|
851 | Note: |
---|
852 | if either `fields` or `colnames` is provided, the results |
---|
853 | will be converted to a DAL `Rows` object using the |
---|
854 | `db._adapter.parse()` method |
---|
855 | colnames: list of field names in tablename.fieldname format |
---|
856 | |
---|
857 | Note: |
---|
858 | It is also possible to specify both "fields" and the associated |
---|
859 | "colnames". In that case, "fields" can also include DAL Expression |
---|
860 | objects in addition to Field objects. For Field objects in "fields", |
---|
861 | the associated "colnames" must still be in tablename.fieldname |
---|
862 | format. For Expression objects in "fields", the associated |
---|
863 | "colnames" can be any arbitrary labels. |
---|
864 | |
---|
865 | DAL Table objects referred to by "fields" or "colnames" can be dummy |
---|
866 | tables and do not have to represent any real tables in the database. |
---|
867 | Also, note that the "fields" and "colnames" must be in the |
---|
868 | same order as the fields in the results cursor returned from the DB. |
---|
869 | |
---|
870 | """ |
---|
871 | adapter = self._adapter |
---|
872 | if placeholders: |
---|
873 | adapter.execute(query, placeholders) |
---|
874 | else: |
---|
875 | adapter.execute(query) |
---|
876 | if as_dict or as_ordered_dict: |
---|
877 | if not hasattr(adapter.cursor, "description"): |
---|
878 | raise RuntimeError( |
---|
879 | "database does not support executesql(...,as_dict=True)" |
---|
880 | ) |
---|
881 | # Non-DAL legacy db query, converts cursor results to dict. |
---|
882 | # sequence of 7-item sequences. each sequence tells about a column. |
---|
883 | # first item is always the field name according to Python Database API specs |
---|
884 | columns = adapter.cursor.description |
---|
885 | # reduce the column info down to just the field names |
---|
886 | fields = colnames or [f[0] for f in columns] |
---|
887 | if len(fields) != len(set(fields)): |
---|
888 | raise RuntimeError( |
---|
889 | "Result set includes duplicate column names. Specify unique column names using the 'colnames' argument" |
---|
890 | ) |
---|
891 | #: avoid bytes strings in columns names (py3) |
---|
892 | if columns and not PY2: |
---|
893 | for i in range(0, len(fields)): |
---|
894 | if isinstance(fields[i], bytes): |
---|
895 | fields[i] = fields[i].decode("utf8") |
---|
896 | |
---|
897 | # will hold our finished resultset in a list |
---|
898 | data = adapter.fetchall() |
---|
899 | # convert the list for each row into a dictionary so it's |
---|
900 | # easier to work with. row['field_name'] rather than row[0] |
---|
901 | if as_ordered_dict: |
---|
902 | _dict = OrderedDict |
---|
903 | else: |
---|
904 | _dict = dict |
---|
905 | return [_dict(zip(fields, row)) for row in data] |
---|
906 | try: |
---|
907 | data = adapter.fetchall() |
---|
908 | except: |
---|
909 | return None |
---|
910 | if fields or colnames: |
---|
911 | fields = [] if fields is None else fields |
---|
912 | if not isinstance(fields, list): |
---|
913 | fields = [fields] |
---|
914 | extracted_fields = [] |
---|
915 | for field in fields: |
---|
916 | if isinstance(field, Table): |
---|
917 | extracted_fields.extend([f for f in field]) |
---|
918 | else: |
---|
919 | extracted_fields.append(field) |
---|
920 | if not colnames: |
---|
921 | colnames = [f.sqlsafe for f in extracted_fields] |
---|
922 | else: |
---|
923 | #: extracted_fields is empty we should make it from colnames |
---|
924 | # what 'col_fields' is for |
---|
925 | col_fields = [] # [[tablename, fieldname], ....] |
---|
926 | newcolnames = [] |
---|
927 | for tf in colnames: |
---|
928 | if "." in tf: |
---|
929 | t_f = tf.split(".") |
---|
930 | tf = ".".join(adapter.dialect.quote(f) for f in t_f) |
---|
931 | else: |
---|
932 | t_f = None |
---|
933 | if not extracted_fields: |
---|
934 | col_fields.append(t_f) |
---|
935 | newcolnames.append(tf) |
---|
936 | colnames = newcolnames |
---|
937 | data = adapter.parse( |
---|
938 | data, |
---|
939 | fields = extracted_fields or [tf and self[tf[0]][tf[1]] for tf in col_fields], |
---|
940 | colnames=colnames |
---|
941 | ) |
---|
942 | return data |
---|
943 | |
---|
944 | def _remove_references_to(self, thistable): |
---|
945 | for table in self: |
---|
946 | table._referenced_by = [ |
---|
947 | field for field in table._referenced_by if not field.table == thistable |
---|
948 | ] |
---|
949 | |
---|
950 | def has_representer(self, name): |
---|
951 | return callable(self.representers.get(name)) |
---|
952 | |
---|
953 | def represent(self, name, *args, **kwargs): |
---|
954 | return self.representers[name](*args, **kwargs) |
---|
955 | |
---|
956 | def export_to_csv_file(self, ofile, *args, **kwargs): |
---|
957 | step = long(kwargs.get("max_fetch_rows,", 500)) |
---|
958 | write_colnames = kwargs["write_colnames"] = kwargs.get("write_colnames", True) |
---|
959 | for table in self.tables: |
---|
960 | ofile.write("TABLE %s\r\n" % table) |
---|
961 | query = self._adapter.id_query(self[table]) |
---|
962 | nrows = self(query).count() |
---|
963 | kwargs["write_colnames"] = write_colnames |
---|
964 | for k in range(0, nrows, step): |
---|
965 | self(query).select(limitby=(k, k + step)).export_to_csv_file( |
---|
966 | ofile, *args, **kwargs |
---|
967 | ) |
---|
968 | kwargs["write_colnames"] = False |
---|
969 | ofile.write("\r\n\r\n") |
---|
970 | ofile.write("END") |
---|
971 | |
---|
972 | def import_from_csv_file( |
---|
973 | self, |
---|
974 | ifile, |
---|
975 | id_map=None, |
---|
976 | null="<NULL>", |
---|
977 | unique="uuid", |
---|
978 | map_tablenames=None, |
---|
979 | ignore_missing_tables=False, |
---|
980 | *args, |
---|
981 | **kwargs |
---|
982 | ): |
---|
983 | # if id_map is None: id_map={} |
---|
984 | id_offset = {} # only used if id_map is None |
---|
985 | map_tablenames = map_tablenames or {} |
---|
986 | for line in ifile: |
---|
987 | line = line.strip() |
---|
988 | if not line: |
---|
989 | continue |
---|
990 | elif line == "END": |
---|
991 | return |
---|
992 | elif not line.startswith("TABLE "): |
---|
993 | raise SyntaxError("Invalid file format") |
---|
994 | elif not line[6:] in self.tables: |
---|
995 | raise SyntaxError("Unknown table : %s" % line[6:]) |
---|
996 | else: |
---|
997 | tablename = line[6:] |
---|
998 | tablename = map_tablenames.get(tablename, tablename) |
---|
999 | if tablename is not None and tablename in self.tables: |
---|
1000 | self[tablename].import_from_csv_file( |
---|
1001 | ifile, id_map, null, unique, id_offset, *args, **kwargs |
---|
1002 | ) |
---|
1003 | elif tablename is None or ignore_missing_tables: |
---|
1004 | # skip all non-empty lines |
---|
1005 | for line in ifile: |
---|
1006 | if not line.strip(): |
---|
1007 | break |
---|
1008 | else: |
---|
1009 | raise RuntimeError( |
---|
1010 | "Unable to import table that does not exist.\nTry db.import_from_csv_file(..., map_tablenames={'table':'othertable'},ignore_missing_tables=True)" |
---|
1011 | ) |
---|
1012 | |
---|
1013 | def can_join(self): |
---|
1014 | return self._adapter.can_join() |
---|
1015 | |
---|
1016 | |
---|
1017 | def DAL_unpickler(db_uid): |
---|
1018 | return DAL("<zombie>", db_uid=db_uid) |
---|
1019 | |
---|
1020 | |
---|
1021 | def DAL_pickler(db): |
---|
1022 | return DAL_unpickler, (db._db_uid,) |
---|
1023 | |
---|
1024 | |
---|
1025 | copyreg.pickle(DAL, DAL_pickler, DAL_unpickler) |
---|