1 | import copy |
---|
2 | import datetime |
---|
3 | import locale |
---|
4 | import os |
---|
5 | import pickle |
---|
6 | import sys |
---|
7 | from ._compat import PY2, string_types, pjoin, iteritems, to_bytes, exists |
---|
8 | from ._load import portalocker |
---|
9 | from .helpers.classes import SQLCustomType, DatabaseStoredFile |
---|
10 | |
---|
11 | |
---|
class Migrator(object):
    """Handles table creation and schema migration for a DAL adapter.

    The migrator compares a table's current definition against the field
    metadata pickled in its ``.table`` file from a previous run, and emits
    the DDL needed to reconcile the two.
    """

    def __init__(self, adapter):
        # adapter: the database adapter this migrator operates on
        self.adapter = adapter

    @property
    def db(self):
        # DAL instance that owns the adapter
        return self.adapter.db

    @property
    def dialect(self):
        # SQL dialect helper used for quoting names and building DDL fragments
        return self.adapter.dialect

    @property
    def dbengine(self):
        # engine name string, e.g. "sqlite", "postgres", "mysql", "oracle"
        return self.adapter.dbengine
    def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
        """Build and (optionally) execute the CREATE TABLE statement for *table*.

        Returns the generated SQL string. When ``migrate`` is truthy the
        per-field metadata is pickled to the table's ``.table`` file; if that
        file already exists, its contents are compared with the current
        definition and ``migrate_table()`` is invoked on any mismatch.
        ``fake_migrate`` logs/saves metadata without executing DDL.
        ``polymodel`` is not used in this implementation (kept for adapter
        interface compatibility).
        """
        db = table._db
        table._migrate = migrate
        fields = []
        # PostGIS geo fields are added after the table has been created
        postcreation_fields = []
        # sql_fields: metadata used to detect migrations;
        # sql_fields_aux: SQL used for CREATE TABLE (includes defaults)
        sql_fields = {}
        sql_fields_aux = {}
        # TFK: table-level (multi-column) foreign keys, keyed by referenced table
        TFK = {}
        tablename = table._tablename
        types = self.adapter.types
        for sortable, field in enumerate(table, start=1):
            if self.db._ignore_field_case:
                field_name = field.name.lower()
                field_rname = field._rname.lower()
            else:
                field_name = field.name
                field_rname = field._rname
            if self.dbengine == "oracle":
                # Oracle needs all field names quoted to ensure consistent case
                field_rname = self.dialect.quote(field_rname)
            field_type = field.type
            if isinstance(field_type, SQLCustomType):
                ftype = field_type.native or field_type.type
            elif field_type.startswith(("reference", "big-reference")):
                # strip the "reference " / "big-reference " prefix to get the target
                if field_type.startswith("reference"):
                    referenced = field_type[10:].strip()
                    type_name = "reference"
                else:
                    referenced = field_type[14:].strip()
                    type_name = "big-reference"

                if referenced == ".":
                    # "." is shorthand for a self-reference
                    referenced = tablename
                constraint_name = self.dialect.constraint_name(
                    table._raw_rname, field._raw_rname
                )
                # if not '.' in referenced \
                #         and referenced != tablename \
                #         and hasattr(table,'_primarykey'):
                #     ftype = types['integer']
                # else:
                try:
                    rtable = db[referenced]
                    rfield = rtable._id
                    rfieldname = rfield.name
                    rtablename = referenced
                except (KeyError, ValueError, AttributeError) as e:
                    self.db.logger.debug("Error: %s" % e)
                    try:
                        # fall back to "tablename.fieldname" references
                        rtablename, rfieldname = referenced.split(".")
                        rtable = db[rtablename]
                        rfield = rtable[rfieldname]
                    except Exception as e:
                        self.db.logger.debug("Error: %s" % e)
                        raise KeyError(
                            "Cannot resolve reference %s in %s definition"
                            % (referenced, table._tablename)
                        )

                # must be PK reference or unique
                rfield_rname = rfield._rname
                if self.dbengine == "oracle":
                    rfield_rname = self.dialect.quote(rfield_rname)

                if (
                    not rfield.type.startswith(("reference", "big-reference"))
                    and getattr(rtable, "_primarykey", None)
                    and rfieldname in rtable._primarykey
                    or rfield.unique
                ):
                    ftype = types[rfield.type[:9]] % dict(length=rfield.length)
                    # multicolumn primary key reference?
                    if not rfield.unique and len(rtable._primarykey) > 1:
                        # then it has to be a table level FK
                        if rtablename not in TFK:
                            TFK[rtablename] = {}
                        TFK[rtablename][rfieldname] = field_name
                    else:
                        fk = rtable._rname + " (" + rfield._rname + ")"
                        if self.dbengine == "oracle":
                            fk = (
                                self.dialect.quote(rtable._rname)
                                + " ("
                                + rfield_rname
                                + ")"
                            )
                        ftype = ftype + types["reference FK"] % dict(
                            # should be quoted
                            constraint_name=constraint_name,
                            foreign_key=fk,
                            table_name=table._rname,
                            field_name=field._rname,
                            on_delete_action=field.ondelete,
                        )
                else:
                    # make a guess here for circular references
                    # NOTE(review): id_fieldname is computed but never used
                    # below — left as-is to preserve behavior.
                    if referenced in db:
                        id_fieldname = db[referenced]._id._rname
                    elif referenced == tablename:
                        id_fieldname = table._id._rname
                    else:  # make a guess
                        id_fieldname = self.dialect.quote("id")
                    # gotcha: the referenced table must be defined before
                    # the referencing one to be able to create the table
                    # Also if it's not recommended, we can still support
                    # references to tablenames without rname to make
                    # migrations and model relationship work also if tables
                    # are not defined in order
                    if referenced == tablename:
                        real_referenced = db[referenced]._rname
                    else:
                        real_referenced = (
                            referenced in db and db[referenced]._rname or referenced
                        )
                    if self.dbengine == "oracle":
                        real_referenced = self.dialect.quote(real_referenced)
                    rfield = db[referenced]._id
                    ftype_info = dict(
                        index_name=self.dialect.quote(field._raw_rname + "__idx"),
                        field_name=field_rname,
                        constraint_name=self.dialect.quote(constraint_name),
                        foreign_key="%s (%s)" % (real_referenced, rfield_rname),
                        on_delete_action=field.ondelete,
                    )
                    ftype_info["null"] = (
                        " NOT NULL" if field.notnull else self.dialect.allow_null
                    )
                    ftype_info["unique"] = " UNIQUE" if field.unique else ""
                    ftype = types[type_name] % ftype_info
            elif field_type.startswith("list:reference"):
                ftype = types[field_type[:14]]
            elif field_type.startswith("decimal"):
                # parse "decimal(p,s)" into its precision and scale
                precision, scale = map(int, field_type[8:-1].split(","))
                ftype = types[field_type[:7]] % dict(precision=precision, scale=scale)
            elif field_type.startswith("geo"):
                if not hasattr(self.adapter, "srid"):
                    raise RuntimeError("Adapter does not support geometry")
                srid = self.adapter.srid
                geotype, parms = field_type[:-1].split("(")
                if geotype not in types:
                    raise SyntaxError(
                        "Field: unknown field type: %s for %s"
                        % (field_type, field_name)
                    )
                ftype = types[geotype]
                if self.dbengine == "postgres" and geotype == "geometry":
                    if self.db._ignore_field_case is True:
                        field_name = field_name.lower()
                    # parameters: schema, srid, dimension
                    dimension = 2  # GIS.dimension ???
                    parms = parms.split(",")
                    if len(parms) == 3:
                        schema, srid, dimension = parms
                    elif len(parms) == 2:
                        schema, srid = parms
                    else:
                        schema = parms[0]
                    # build an AddGeometryColumn call; the %% escapes survive
                    # the first interpolation so the second fills the values
                    ftype = (
                        "SELECT AddGeometryColumn ('%%(schema)s', '%%(tablename)s', '%%(fieldname)s', %%(srid)s, '%s', %%(dimension)s);"
                        % types[geotype]
                    )
                    ftype = ftype % dict(
                        schema=schema,
                        tablename=table._raw_rname,
                        fieldname=field._raw_rname,
                        srid=srid,
                        dimension=dimension,
                    )
                    postcreation_fields.append(ftype)
            elif field_type not in types:
                raise SyntaxError(
                    "Field: unknown field type: %s for %s" % (field_type, field_name)
                )
            else:
                ftype = types[field_type] % {"length": field.length}

            if not field_type.startswith(("id", "reference", "big-reference")):
                if field.notnull:
                    ftype += " NOT NULL"
                else:
                    ftype += self.dialect.allow_null
                if field.unique:
                    ftype += " UNIQUE"
                if field.custom_qualifier:
                    ftype += " %s" % field.custom_qualifier

            # add to list of fields
            sql_fields[field_name] = dict(
                length=field.length,
                unique=field.unique,
                notnull=field.notnull,
                sortable=sortable,
                type=str(field_type),
                sql=ftype,
                rname=field_rname,
                raw_rname=field._raw_rname,
            )

            if field.notnull and field.default is not None:
                # Caveat: sql_fields and sql_fields_aux
                # differ for default values.
                # sql_fields is used to trigger migrations and sql_fields_aux
                # is used for create tables.
                # The reason is that we do not want to trigger
                # a migration simply because a default value changes.
                not_null = self.dialect.not_null(field.default, field_type)
                ftype = ftype.replace("NOT NULL", not_null)
            sql_fields_aux[field_name] = dict(sql=ftype)
            # Postgres - PostGIS:
            # geometry fields are added after the table has been created, not now
            if not (self.dbengine == "postgres" and field_type.startswith("geom")):
                fields.append("%s %s" % (field_rname, ftype))
        other = ";"

        # backend-specific extensions to fields
        if self.dbengine == "mysql":
            if not hasattr(table, "_primarykey"):
                fields.append("PRIMARY KEY (%s)" % (table._id._rname))
            engine = self.adapter.adapter_args.get("engine", "InnoDB")
            other = " ENGINE=%s CHARACTER SET utf8;" % engine

        fields = ",\n    ".join(fields)
        # append table-level (multi-column) foreign key constraints
        for rtablename in TFK:
            rtable = db[rtablename]
            rfields = TFK[rtablename]
            pkeys = [rtable[pk]._rname for pk in rtable._primarykey]
            fk_fields = [table[rfields[k]] for k in rtable._primarykey]
            fkeys = [f._rname for f in fk_fields]
            constraint_name = self.dialect.constraint_name(
                table._raw_rname, "_".join(f._raw_rname for f in fk_fields)
            )
            on_delete = list(set(f.ondelete for f in fk_fields))
            if len(on_delete) > 1:
                raise SyntaxError(
                    "Table %s has incompatible ON DELETE actions in multi-field foreign key."
                    % table._dalname
                )
            tfk_field_name = ", ".join(fkeys)
            tfk_foreign_key = ", ".join(pkeys)
            tfk_foreign_table = rtable._rname
            if self.dbengine == "oracle":
                tfk_field_name = ", ".join([self.dialect.quote(fkey) for fkey in fkeys])
                tfk_foreign_key = ", ".join(
                    [self.dialect.quote(pkey) for pkey in pkeys]
                )
                tfk_foreign_table = self.dialect.quote(rtable._rname)
            fields = (
                fields
                + ",\n    "
                + types["reference TFK"]
                % dict(
                    constraint_name=constraint_name,
                    table_name=table._rname,
                    field_name=tfk_field_name,
                    foreign_table=tfk_foreign_table,
                    foreign_key=tfk_foreign_key,
                    on_delete_action=on_delete[0],
                )
            )

        table_rname = table._rname
        if self.dbengine == "oracle":
            # must be explicitly quoted to preserve case
            table_rname = self.dialect.quote(table_rname)

        if getattr(table, "_primarykey", None):
            query = "CREATE TABLE %s(\n    %s,\n    %s) %s" % (
                table_rname,
                fields,
                self.dialect.primary_key(
                    ", ".join([table[pk]._rname for pk in table._primarykey])
                ),
                other,
            )
        else:
            query = "CREATE TABLE %s(\n    %s\n)%s" % (table_rname, fields, other)

        uri = self.adapter.uri
        if uri.startswith("sqlite:///") or uri.startswith("spatialite:///"):
            if PY2:
                path_encoding = (
                    sys.getfilesystemencoding()
                    or locale.getdefaultlocale()[1]
                    or "utf8"
                )
                # NOTE(review): the [9:] slice assumes the 9-char "sqlite://"
                # scheme prefix; for "spatialite:///" it retains part of the
                # scheme — TODO confirm against upstream behavior.
                dbpath = uri[9 : uri.rfind("/")].decode("utf8").encode(path_encoding)
            else:
                dbpath = uri[9 : uri.rfind("/")]
        else:
            dbpath = self.adapter.folder

        if not migrate:
            return query
        elif uri.startswith("sqlite:memory") or uri.startswith("spatialite:memory"):
            # in-memory databases have no .table metadata file
            table._dbt = None
        elif isinstance(migrate, string_types):
            # a string migrate value is an explicit .table file name
            table._dbt = pjoin(dbpath, migrate)
        else:
            table._dbt = pjoin(dbpath, "%s_%s.table" % (db._uri_hash, tablename))

        if not table._dbt or not self.file_exists(table._dbt):
            # first creation: run the DDL and persist the metadata
            if table._dbt:
                self.log(
                    "timestamp: %s\n%s\n"
                    % (datetime.datetime.today().isoformat(), query),
                    table,
                )
            if not fake_migrate:
                self.adapter.create_sequence_and_triggers(query, table)
                db.commit()
                # Postgres geom fields are added now,
                # after the table has been created
                for query in postcreation_fields:
                    self.adapter.execute(query)
                    db.commit()
            if table._dbt:
                tfile = self.file_open(table._dbt, "wb")
                pickle.dump(sql_fields, tfile)
                self.file_close(tfile)
                if fake_migrate:
                    self.log("faked!\n", table)
                else:
                    self.log("success!\n", table)
        else:
            # table metadata exists: load it and migrate if it differs
            tfile = self.file_open(table._dbt, "rb")
            try:
                sql_fields_old = pickle.load(tfile)
            except EOFError:
                self.file_close(tfile)
                raise RuntimeError("File %s appears corrupted" % table._dbt)
            self.file_close(tfile)
            # add missing rnames (metadata written by older pydal versions)
            for key, item in sql_fields_old.items():
                tmp = sql_fields.get(key)
                if tmp:
                    item.setdefault("rname", tmp["rname"])
                    item.setdefault("raw_rname", tmp["raw_rname"])
                else:
                    item.setdefault("rname", self.dialect.quote(key))
                    item.setdefault("raw_rname", key)
            if sql_fields != sql_fields_old:
                self.migrate_table(
                    table,
                    sql_fields,
                    sql_fields_old,
                    sql_fields_aux,
                    None,
                    fake_migrate=fake_migrate,
                )
        return query
---|
380 | def _fix(self, item): |
---|
381 | k, v = item |
---|
382 | if self.dbengine == "oracle" and "rname" in v: |
---|
383 | v["rname"] = self.dialect.quote(v["rname"]) |
---|
384 | if not isinstance(v, dict): |
---|
385 | v = dict(type="unknown", sql=v) |
---|
386 | if self.db._ignore_field_case is not True: |
---|
387 | return k, v |
---|
388 | return k.lower(), v |
---|
389 | |
---|
    def migrate_table(
        self,
        table,
        sql_fields,
        sql_fields_old,
        sql_fields_aux,
        logfile,
        fake_migrate=False,
    ):
        """Emit and run the ALTER TABLE statements that reconcile *table*
        with its previously saved metadata.

        ``sql_fields`` / ``sql_fields_old`` / ``sql_fields_aux`` map field
        names to metadata dicts (aux carries the SQL including defaults,
        used for column adds). ``logfile`` is deprecated and ignored.
        When ``fake_migrate`` is set, statements are logged and metadata
        saved but nothing is executed.
        """
        # logfile is deprecated (moved to adapter.log method)
        db = table._db
        db._migrated.append(table._tablename)
        tablename = table._tablename
        if self.dbengine in ("firebird",):
            drop_expr = "ALTER TABLE %s DROP %s;"
        else:
            drop_expr = "ALTER TABLE %s DROP COLUMN %s;"
        field_types = dict(
            (x.lower(), table[x].type) for x in sql_fields.keys() if x in table
        )
        # make sure all field names are lower case to avoid
        # migrations because of case change
        sql_fields = dict(map(self._fix, iteritems(sql_fields)))
        sql_fields_old = dict(map(self._fix, iteritems(sql_fields_old)))
        sql_fields_aux = dict(map(self._fix, iteritems(sql_fields_aux)))

        table_rname = table._rname
        if self.dbengine == "oracle":
            # Oracle names are quoted to preserve case
            table_rname = self.dialect.quote(table_rname)

        if db._debug:
            db.logger.debug("migrating %s to %s" % (sql_fields_old, sql_fields))

        # union of new and old field names, new fields first
        keys = list(sql_fields.keys())
        for key in sql_fields_old:
            if key not in keys:
                keys.append(key)
        new_add = self.dialect.concat_add(table_rname)

        metadata_change = False
        sql_fields_current = copy.copy(sql_fields_old)
        for key in keys:
            query = None
            if key not in sql_fields_old:
                # new field: ADD COLUMN (or run the PostGIS add statement)
                sql_fields_current[key] = sql_fields[key]
                if self.dbengine in ("postgres",) and sql_fields[key][
                    "type"
                ].startswith("geometry"):
                    # 'sql' == ftype in sql
                    query = [sql_fields[key]["sql"]]
                else:
                    query = [
                        "ALTER TABLE %s ADD %s %s;"
                        % (
                            table_rname,
                            sql_fields[key]["rname"],
                            sql_fields_aux[key]["sql"].replace(", ", new_add),
                        )
                    ]
                metadata_change = True
            elif self.dbengine in ("sqlite", "spatialite"):
                # sqlite cannot DROP/ALTER columns: only handle rname changes
                # by adding the new column and copying the data over
                if key in sql_fields:
                    sql_fields_current[key] = sql_fields[key]
                    # Field rname has changes, add new column
                    if (
                        sql_fields[key]["raw_rname"].lower()
                        != sql_fields_old[key]["raw_rname"].lower()
                    ):
                        tt = sql_fields_aux[key]["sql"].replace(", ", new_add)
                        query = [
                            "ALTER TABLE %s ADD %s %s;"
                            % (table_rname, sql_fields[key]["rname"], tt),
                            "UPDATE %s SET %s=%s;"
                            % (
                                table_rname,
                                sql_fields[key]["rname"],
                                sql_fields_old[key]["rname"],
                            ),
                        ]
                        metadata_change = True
            elif key not in sql_fields:
                # field removed from the model: DROP COLUMN
                del sql_fields_current[key]
                ftype = sql_fields_old[key]["type"]
                if self.dbengine == "postgres" and ftype.startswith("geometry"):
                    geotype, parms = ftype[:-1].split("(")
                    schema = parms.split(",")[0]
                    query = [
                        "SELECT DropGeometryColumn ('%(schema)s', \
'%(table)s', '%(field)s');"
                        % dict(
                            schema=schema,
                            table=table._raw_rname,
                            field=sql_fields_old[key]["raw_rname"],
                        )
                    ]
                else:
                    query = [drop_expr % (table._rname, sql_fields_old[key]["rname"])]
                metadata_change = True
            # The field has a new rname, temp field is not needed
            elif (
                sql_fields[key]["raw_rname"].lower()
                != sql_fields_old[key]["raw_rname"].lower()
            ):
                sql_fields_current[key] = sql_fields[key]
                tt = sql_fields_aux[key]["sql"].replace(", ", new_add)
                query = [
                    "ALTER TABLE %s ADD %s %s;"
                    % (table_rname, sql_fields[key]["rname"], tt),
                    "UPDATE %s SET %s=%s;"
                    % (
                        table_rname,
                        sql_fields[key]["rname"],
                        sql_fields_old[key]["rname"],
                    ),
                    drop_expr % (table_rname, sql_fields_old[key]["rname"]),
                ]
                metadata_change = True
            elif (
                sql_fields[key]["sql"] != sql_fields_old[key]["sql"]
                and not isinstance(field_types.get(key), SQLCustomType)
                and not sql_fields[key]["type"].startswith("reference")
                and not sql_fields[key]["type"].startswith("double")
                and not sql_fields[key]["type"].startswith("id")
            ):
                # type changed in place: copy through a temp column
                sql_fields_current[key] = sql_fields[key]
                tt = sql_fields_aux[key]["sql"].replace(", ", new_add)
                key_tmp = self.dialect.quote(key + "__tmp")
                query = [
                    "ALTER TABLE %s ADD %s %s;" % (table_rname, key_tmp, tt),
                    "UPDATE %s SET %s=%s;"
                    % (table_rname, key_tmp, sql_fields_old[key]["rname"]),
                    drop_expr % (table_rname, sql_fields_old[key]["rname"]),
                    "ALTER TABLE %s ADD %s %s;"
                    % (table_rname, sql_fields[key]["rname"], tt),
                    "UPDATE %s SET %s=%s;"
                    % (table_rname, sql_fields[key]["rname"], key_tmp),
                    drop_expr % (table_rname, key_tmp),
                ]
                metadata_change = True
            elif sql_fields[key] != sql_fields_old[key]:
                # metadata-only difference (e.g. length/notnull flags): no DDL
                sql_fields_current[key] = sql_fields[key]
                metadata_change = True

            if query:
                self.log(
                    "timestamp: %s\n" % datetime.datetime.today().isoformat(), table
                )
                for sub_query in query:
                    self.log(sub_query + "\n", table)
                    if fake_migrate:
                        if db._adapter.commit_on_alter_table:
                            self.save_dbt(table, sql_fields_current)
                        self.log("faked!\n", table)
                    else:
                        self.adapter.execute(sub_query)
                        # Caveat: mysql, oracle and firebird
                        # do not allow multiple alter table
                        # in one transaction so we must commit
                        # partial transactions and
                        # update table._dbt after alter table.
                        if db._adapter.commit_on_alter_table:
                            db.commit()
                            self.save_dbt(table, sql_fields_current)
                            self.log("success!\n", table)

            elif metadata_change:
                self.save_dbt(table, sql_fields_current)

        # NOTE(review): `query` here holds the value from the *last* loop
        # iteration — the final commit/save is skipped only when that last
        # change was already committed per-statement above.
        if metadata_change and not (query and db._adapter.commit_on_alter_table):
            db.commit()
            self.save_dbt(table, sql_fields_current)
            self.log("success!\n", table)
---|
563 | def save_dbt(self, table, sql_fields_current): |
---|
564 | tfile = self.file_open(table._dbt, "wb") |
---|
565 | pickle.dump(sql_fields_current, tfile) |
---|
566 | self.file_close(tfile) |
---|
567 | |
---|
568 | def log(self, message, table=None): |
---|
569 | isabs = None |
---|
570 | logfilename = self.adapter.adapter_args.get("logfile", "sql.log") |
---|
571 | writelog = bool(logfilename) |
---|
572 | if writelog: |
---|
573 | isabs = os.path.isabs(logfilename) |
---|
574 | if table and table._dbt and writelog and self.adapter.folder: |
---|
575 | if isabs: |
---|
576 | table._loggername = logfilename |
---|
577 | else: |
---|
578 | table._loggername = pjoin(self.adapter.folder, logfilename) |
---|
579 | logfile = self.file_open(table._loggername, "ab") |
---|
580 | logfile.write(to_bytes(message)) |
---|
581 | self.file_close(logfile) |
---|
582 | |
---|
583 | @staticmethod |
---|
584 | def file_open(filename, mode="rb", lock=True): |
---|
585 | # to be used ONLY for files that on GAE may not be on filesystem |
---|
586 | if lock: |
---|
587 | fileobj = portalocker.LockedFile(filename, mode) |
---|
588 | else: |
---|
589 | fileobj = open(filename, mode) |
---|
590 | return fileobj |
---|
591 | |
---|
592 | @staticmethod |
---|
593 | def file_close(fileobj): |
---|
594 | # to be used ONLY for files that on GAE may not be on filesystem |
---|
595 | if fileobj: |
---|
596 | fileobj.close() |
---|
597 | |
---|
598 | @staticmethod |
---|
599 | def file_delete(filename): |
---|
600 | os.unlink(filename) |
---|
601 | |
---|
602 | @staticmethod |
---|
603 | def file_exists(filename): |
---|
604 | # to be used ONLY for files that on GAE may not be on filesystem |
---|
605 | return exists(filename) |
---|
606 | |
---|
607 | |
---|
class InDBMigrator(Migrator):
    """Migrator variant that keeps .table migration metadata inside the
    database (the ``web2py_filesystem`` table) instead of on disk.

    Useful on platforms without a writable local filesystem.
    """

    def file_exists(self, filename):
        # delegate existence checks to the database-backed file store
        return DatabaseStoredFile.exists(self.db, filename)

    def file_open(self, filename, mode="rb", lock=True):
        # NOTE(review): `lock` is accepted for interface compatibility but
        # not passed on — confirm DatabaseStoredFile needs no locking here
        return DatabaseStoredFile(self.db, filename, mode)

    @staticmethod
    def file_close(fileobj):
        # close_connection() (not close()) — presumably persists the
        # buffered content back to the database; TODO confirm
        fileobj.close_connection()

    def file_delete(self, filename):
        # filename is generated internally (uri hash + table name), not
        # user input; still, a parameterized query would be safer than
        # string interpolation here
        query = "DELETE FROM web2py_filesystem WHERE path='%s'" % filename
        self.db.executesql(query)
        self.db.commit()
---|