1 | # maxdb.py |
---|
2 | # |
---|
3 | # This module is part of SQLAlchemy and is released under |
---|
4 | # the MIT License: http://www.opensource.org/licenses/mit-license.php |
---|
5 | |
---|
6 | """Support for the MaxDB database. |
---|
7 | |
---|
8 | TODO: More module docs! MaxDB support is currently experimental. |
---|
9 | |
---|
10 | Overview |
---|
11 | -------- |
---|
12 | |
---|
13 | The ``maxdb`` dialect is **experimental** and has only been tested on 7.6.03.007 |
---|
14 | and 7.6.00.037. Of these, **only 7.6.03.007 will work** with SQLAlchemy's ORM. |
---|
15 | The earlier version has severe ``LEFT JOIN`` limitations and will return |
---|
16 | incorrect results from even very simple ORM queries. |
---|
17 | |
---|
18 | Only the native Python DB-API is currently supported. ODBC driver support |
---|
19 | is a future enhancement. |
---|
20 | |
---|
21 | Connecting |
---|
22 | ---------- |
---|
23 | |
---|
24 | The username is case-sensitive. If you usually connect to the |
---|
25 | database with sqlcli and other tools in lower case, you likely need to |
---|
26 | use upper case for DB-API. |
---|
27 | |
---|
28 | Implementation Notes |
---|
29 | -------------------- |
---|
30 | |
---|
31 | Also check the DatabaseNotes page on the wiki for detailed information. |
---|
32 | |
---|
33 | With the 7.6.00.37 driver and Python 2.5, it seems that all DB-API |
---|
34 | generated exceptions are broken and can cause Python to crash. |
---|
35 | |
---|
36 | For 'somecol.in_([])' to work, the IN operator's generation must be changed |
---|
37 | to cast 'NULL' to a numeric, i.e. NUM(NULL). The DB-API doesn't accept a |
---|
38 | bind parameter there, so that particular generation must inline the NULL value, |
---|
39 | which depends on [ticket:807]. |
---|
40 | |
---|
41 | The DB-API is very picky about where bind params may be used in queries. |
---|
42 | |
---|
43 | Bind params for some functions (e.g. MOD) need type information supplied. |
---|
44 | The dialect does not yet do this automatically. |
---|
45 | |
---|
46 | Max will occasionally throw up 'bad sql, compile again' exceptions for |
---|
47 | perfectly valid SQL. The dialect does not currently handle these, more |
---|
48 | research is needed. |
---|
49 | |
---|
50 | MaxDB 7.5 and Sap DB <= 7.4 reportedly do not support schemas. A very |
---|
51 | slightly different version of this dialect would be required to support |
---|
52 | those versions, and can easily be added if there is demand. Some other |
---|
53 | required components such as a Max-aware 'old oracle style' join compiler |
---|
54 | (thetas with (+) outer indicators) are already done and available for |
---|
55 | integration- email the devel list if you're interested in working on |
---|
56 | this. |
---|
57 | |
---|
58 | """ |
---|
59 | import datetime, itertools, re |
---|
60 | |
---|
61 | from sqlalchemy import exc, schema, sql, util |
---|
62 | from sqlalchemy.sql import operators as sql_operators, expression as sql_expr |
---|
63 | from sqlalchemy.sql import compiler, visitors |
---|
64 | from sqlalchemy.engine import base as engine_base, default |
---|
65 | from sqlalchemy import types as sqltypes |
---|
66 | |
---|
67 | |
---|
# Public type classes exported by this dialect module.
__all__ = [
    'MaxString', 'MaxUnicode', 'MaxChar', 'MaxText', 'MaxInteger',
    'MaxSmallInteger', 'MaxNumeric', 'MaxFloat', 'MaxTimestamp',
    'MaxDate', 'MaxTime', 'MaxBoolean', 'MaxBlob',
    ]
---|
73 | |
---|
74 | |
---|
class _StringType(sqltypes.String):
    """Common base for MaxDB character types (CHAR/VARCHAR/LONG).

    ``encoding`` may be e.g. 'unicode'; it is appended (uppercased) to the
    DDL type and controls whether binds are encoded to the dialect charset.
    """

    _type = None

    def __init__(self, length=None, encoding=None, **kw):
        super(_StringType, self).__init__(length=length, **kw)
        self.encoding = encoding

    def get_col_spec(self):
        """Build the DDL fragment, e.g. 'VARCHAR(10) UNICODE' or 'LONG'."""
        if self.length is None:
            spec = 'LONG'
        else:
            spec = '%s(%s)' % (self._type, self.length)
        if self.encoding is None:
            return spec
        return '%s %s' % (spec, self.encoding.upper())

    def bind_processor(self, dialect):
        """Encode unicode binds, except for UNICODE columns (driver-native)."""
        if self.encoding == 'unicode':
            return None

        def process(value):
            if isinstance(value, unicode):
                return value.encode(dialect.encoding)
            return value
        return process

    def result_processor(self, dialect):
        def process(value):
            # LONG values arrive as file-like reader objects; drain them
            # fully, then run the scalar through the usual conversions.
            while hasattr(value, 'read'):
                value = value.read(value.remainingLength())
            if value is None or isinstance(value, unicode):
                return value
            if isinstance(value, str):
                if self.convert_unicode or dialect.convert_unicode:
                    return value.decode(dialect.encoding)
                return value
            # unexpected type: hand back unchanged
            return value
        return process
---|
123 | |
---|
124 | |
---|
class MaxString(_StringType):
    """VARCHAR string type.

    The previous ``__init__(*a, **kw)`` override only forwarded to
    ``super().__init__`` and has been removed; the inherited constructor
    has the identical effect.
    """

    _type = 'VARCHAR'
---|
130 | |
---|
131 | |
---|
class MaxUnicode(_StringType):
    """VARCHAR UNICODE string type."""

    _type = 'VARCHAR'

    def __init__(self, length=None, **kw):
        # Bug fix: **kw was previously dropped on the way to the base
        # constructor, silently discarding options such as convert_unicode.
        # The UNICODE encoding is still always enforced.
        kw['encoding'] = 'unicode'
        super(MaxUnicode, self).__init__(length=length, **kw)
---|
137 | |
---|
138 | |
---|
class MaxChar(_StringType):
    """Fixed-length CHAR string type."""
    _type = 'CHAR'
---|
141 | |
---|
142 | |
---|
class MaxText(_StringType):
    """LONG (large text) type."""

    _type = 'LONG'

    def __init__(self, *a, **kw):
        super(MaxText, self).__init__(*a, **kw)

    def get_col_spec(self):
        """Return 'LONG', optionally qualified with an encoding keyword."""
        parts = ['LONG']
        if self.encoding is not None:
            parts.append(self.encoding)
        elif self.convert_unicode:
            parts.append('UNICODE')
        return ' '.join(parts)
---|
157 | |
---|
158 | |
---|
class MaxInteger(sqltypes.Integer):
    """INTEGER type."""

    def get_col_spec(self):
        """DDL type string."""
        return 'INTEGER'
---|
162 | |
---|
163 | |
---|
class MaxSmallInteger(MaxInteger):
    """SMALLINT type."""

    def get_col_spec(self):
        """DDL type string."""
        return 'SMALLINT'
---|
167 | |
---|
168 | |
---|
class MaxNumeric(sqltypes.Numeric):
    """The FIXED (also NUMERIC, DECIMAL) data type."""

    def __init__(self, precision=None, scale=None, **kw):
        # Default to Decimal results unless the caller says otherwise.
        kw.setdefault('asdecimal', True)
        super(MaxNumeric, self).__init__(scale=scale, precision=precision,
                                         **kw)

    def bind_processor(self, dialect):
        # The driver accepts numeric binds as-is; no conversion required.
        return None

    def get_col_spec(self):
        """DDL: FIXED(p, s), FIXED(p), or INTEGER when no precision given."""
        if self.precision:
            if self.scale:
                return 'FIXED(%s, %s)' % (self.precision, self.scale)
            return 'FIXED(%s)' % self.precision
        return 'INTEGER'
---|
187 | |
---|
188 | |
---|
class MaxFloat(sqltypes.Float):
    """The FLOAT data type."""

    def get_col_spec(self):
        """DDL: FLOAT or FLOAT(p) when a precision is configured."""
        if self.precision is not None:
            return 'FLOAT(%s)' % (self.precision,)
        return 'FLOAT'
---|
197 | |
---|
198 | |
---|
class MaxTimestamp(sqltypes.DateTime):
    # TIMESTAMP type.  The wire format is selected by
    # dialect.datetimeformat: 'internal' is a packed digit string
    # ('YYYYMMDDHHMMSSffffff'); 'iso' is 'YYYY-MM-DD HH:MM:SS.ffffff'.
    def get_col_spec(self):
        return 'TIMESTAMP'

    def bind_processor(self, dialect):
        """Format datetime binds as strings in the configured wire format."""
        def process(value):
            if value is None:
                return None
            elif isinstance(value, basestring):
                # assume the caller pre-formatted the literal correctly
                return value
            elif dialect.datetimeformat == 'internal':
                # microseconds default to 0 for objects lacking the attribute
                ms = getattr(value, 'microsecond', 0)
                return value.strftime("%Y%m%d%H%M%S" + ("%06u" % ms))
            elif dialect.datetimeformat == 'iso':
                ms = getattr(value, 'microsecond', 0)
                return value.strftime("%Y-%m-%d %H:%M:%S." + ("%06u" % ms))
            else:
                raise exc.InvalidRequestError(
                    "datetimeformat '%s' is not supported." % (
                    dialect.datetimeformat,))
        return process

    def result_processor(self, dialect):
        """Parse the string wire format back into datetime.datetime."""
        def process(value):
            if value is None:
                return None
            elif dialect.datetimeformat == 'internal':
                # fixed-width slices over 'YYYYMMDDHHMMSSffffff'
                return datetime.datetime(
                    *[int(v)
                      for v in (value[0:4], value[4:6], value[6:8],
                                value[8:10], value[10:12], value[12:14],
                                value[14:])])
            elif dialect.datetimeformat == 'iso':
                # slices skip the '-', ' ', ':' and '.' separators
                return datetime.datetime(
                    *[int(v)
                      for v in (value[0:4], value[5:7], value[8:10],
                                value[11:13], value[14:16], value[17:19],
                                value[20:])])
            else:
                raise exc.InvalidRequestError(
                    "datetimeformat '%s' is not supported." % (
                    dialect.datetimeformat,))
        return process
---|
242 | |
---|
243 | |
---|
class MaxDate(sqltypes.Date):
    """DATE type; wire format follows dialect.datetimeformat."""

    def get_col_spec(self):
        return 'DATE'

    def bind_processor(self, dialect):
        """Format date binds as strings ('YYYYMMDD' or 'YYYY-MM-DD')."""
        def process(value):
            if value is None or isinstance(value, basestring):
                return value
            fmt = dialect.datetimeformat
            if fmt == 'internal':
                return value.strftime("%Y%m%d")
            if fmt == 'iso':
                return value.strftime("%Y-%m-%d")
            raise exc.InvalidRequestError(
                "datetimeformat '%s' is not supported." % (fmt,))
        return process

    def result_processor(self, dialect):
        """Parse the string wire format back into datetime.date."""
        def process(value):
            if value is None:
                return None
            fmt = dialect.datetimeformat
            if fmt == 'internal':
                fields = (value[0:4], value[4:6], value[6:8])
            elif fmt == 'iso':
                fields = (value[0:4], value[5:7], value[8:10])
            else:
                raise exc.InvalidRequestError(
                    "datetimeformat '%s' is not supported." % (fmt,))
            return datetime.date(*[int(v) for v in fields])
        return process
---|
279 | |
---|
280 | |
---|
class MaxTime(sqltypes.Time):
    # TIME type.  NOTE(review): the result_processor slice offsets below do
    # not line up with the bind_processor's strftime formats; confirm the
    # actual wire format the driver returns before relying on round-trips.
    def get_col_spec(self):
        return 'TIME'

    def bind_processor(self, dialect):
        """Format time binds as strings in the configured wire format."""
        def process(value):
            if value is None:
                return None
            elif isinstance(value, basestring):
                # assume the caller pre-formatted the literal
                return value
            elif dialect.datetimeformat == 'internal':
                return value.strftime("%H%M%S")
            elif dialect.datetimeformat == 'iso':
                # NOTE(review): dashes, not colons -- presumably matching
                # the server's expectations; verify against the driver.
                return value.strftime("%H-%M-%S")
            else:
                raise exc.InvalidRequestError(
                    "datetimeformat '%s' is not supported." % (
                    dialect.datetimeformat,))
        return process

    def result_processor(self, dialect):
        """Parse the string wire format back into datetime.time."""
        def process(value):
            if value is None:
                return None
            elif dialect.datetimeformat == 'internal':
                # NOTE(review): slices assume an 8-char value, but the bind
                # side emits 6 chars ('%H%M%S') -- TODO confirm the format
                # actually returned by the driver.
                t = datetime.time(
                    *[int(v) for v in (value[0:4], value[4:6], value[6:8])])
                return t
            elif dialect.datetimeformat == 'iso':
                # NOTE(review): offsets look like date-style spacing rather
                # than 'HH-MM-SS' -- TODO confirm.
                return datetime.time(
                    *[int(v) for v in (value[0:4], value[5:7], value[8:10])])
            else:
                raise exc.InvalidRequestError(
                    "datetimeformat '%s' is not supported." % (
                    dialect.datetimeformat,))
        return process
---|
317 | |
---|
318 | |
---|
class MaxBoolean(sqltypes.Boolean):
    """BOOLEAN type."""

    def get_col_spec(self):
        """DDL type string."""
        return 'BOOLEAN'
---|
322 | |
---|
323 | |
---|
class MaxBlob(sqltypes.Binary):
    """LONG BYTE binary type."""

    def get_col_spec(self):
        return 'LONG BYTE'

    def bind_processor(self, dialect):
        """Coerce binary binds to byte strings."""
        def process(value):
            if value is None:
                return None
            return str(value)
        return process

    def result_processor(self, dialect):
        """Drain the driver's LONG reader object into a byte string."""
        def process(value):
            if value is None:
                return None
            return value.read(value.remainingLength())
        return process
---|
343 | |
---|
344 | |
---|
# Generic SQLAlchemy types -> MaxDB implementation types, consumed by
# type_descriptor() via sqltypes.adapt_type().
colspecs = {
    sqltypes.Integer: MaxInteger,
    sqltypes.Smallinteger: MaxSmallInteger,
    sqltypes.Numeric: MaxNumeric,
    sqltypes.Float: MaxFloat,
    sqltypes.DateTime: MaxTimestamp,
    sqltypes.Date: MaxDate,
    sqltypes.Time: MaxTime,
    sqltypes.String: MaxString,
    sqltypes.Binary: MaxBlob,
    sqltypes.Boolean: MaxBoolean,
    sqltypes.Text: MaxText,
    sqltypes.CHAR: MaxChar,
    sqltypes.TIMESTAMP: MaxTimestamp,
    sqltypes.BLOB: MaxBlob,
    sqltypes.Unicode: MaxUnicode,
    }
---|
362 | |
---|
# Lowercased COLUMNS.DATATYPE values -> dialect type classes, used by
# reflection.  Fix: the dict literal previously listed 'long' twice; the
# duplicate entry (which Python silently collapses) has been removed.
ischema_names = {
    'boolean': MaxBoolean,
    'char': MaxChar,
    'character': MaxChar,
    'date': MaxDate,
    'fixed': MaxNumeric,
    'float': MaxFloat,
    'int': MaxInteger,
    'integer': MaxInteger,
    'long binary': MaxBlob,
    'long unicode': MaxText,
    'long': MaxText,
    'smallint': MaxSmallInteger,
    'time': MaxTime,
    'timestamp': MaxTimestamp,
    'varchar': MaxString,
    }
---|
381 | |
---|
382 | |
---|
class MaxDBExecutionContext(default.DefaultExecutionContext):
    def post_exec(self):
        # DB-API bug: if there were any functions as values,
        # then do another select and pull CURRVAL from the
        # autoincrement column's implicit sequence... ugh
        if self.compiled.isinsert and not self.executemany:
            table = self.compiled.statement.table
            index, serial_col = _autoserial_column(table)

            # Re-query only when the driver could not supply the new id:
            # the compiled insert was not "safe serial", or the id slot is
            # missing/zero.
            if serial_col and (not self.compiled._safeserial or
                               not(self._last_inserted_ids) or
                               self._last_inserted_ids[index] in (None, 0)):
                if table.schema:
                    # schema-qualified name comes from format_table itself
                    sql = "SELECT %s.CURRVAL FROM DUAL" % (
                        self.compiled.preparer.format_table(table))
                else:
                    sql = "SELECT CURRENT_SCHEMA.%s.CURRVAL FROM DUAL" % (
                        self.compiled.preparer.format_table(table))

                if self.connection.engine._should_log_info:
                    self.connection.engine.logger.info(sql)
                rs = self.cursor.execute(sql)
                id = rs.fetchone()[0]

                if self.connection.engine._should_log_debug:
                    self.connection.engine.logger.debug([id])
                if not self._last_inserted_ids:
                    # This shouldn't ever be > 1?  Right?
                    self._last_inserted_ids = \
                        [None] * len(table.primary_key.columns)
                self._last_inserted_ids[index] = id

        super(MaxDBExecutionContext, self).post_exec()

    def get_result_proxy(self):
        # LONG values can only be read once from the driver, so use the
        # caching proxy whenever a LONG-ish column is present.
        if self.cursor.description is not None:
            for column in self.cursor.description:
                if column[1] in ('Long Binary', 'Long', 'Long Unicode'):
                    return MaxDBResultProxy(self)
        return engine_base.ResultProxy(self)
---|
423 | |
---|
424 | |
---|
class MaxDBCachedColumnRow(engine_base.RowProxy):
    """A RowProxy that only runs result_processors once per column."""

    def __init__(self, parent, row):
        super(MaxDBCachedColumnRow, self).__init__(parent, row)
        self.columns = {}
        self._row = row
        self._parent = parent

    def _get_col(self, key):
        # Memoize per key; integer indices and string keys cache
        # independently of one another.
        try:
            return self.columns[key]
        except KeyError:
            value = self._parent._get_col(self._row, key)
            self.columns[key] = value
            return value

    def __iter__(self):
        return (self._get_col(i) for i in xrange(len(self._row)))

    def __repr__(self):
        return repr(list(self))

    def __eq__(self, other):
        if other is self:
            return True
        return other == tuple([self._get_col(i)
                               for i in xrange(len(self._row))])

    def __getitem__(self, key):
        if isinstance(key, slice):
            span = key.indices(len(self._row))
            return tuple([self._get_col(i) for i in xrange(*span)])
        return self._get_col(key)

    def __getattr__(self, name):
        try:
            return self._get_col(name)
        except KeyError:
            raise AttributeError(name)
---|
462 | |
---|
463 | |
---|
class MaxDBResultProxy(engine_base.ResultProxy):
    # Rows memoize processed values -- required because LONG columns can
    # only be read from the driver once.
    _process_row = MaxDBCachedColumnRow
---|
466 | |
---|
467 | |
---|
class MaxDBDialect(default.DefaultDialect):
    """SQLAlchemy dialect for MaxDB over the native 'sapdb' DB-API driver."""

    name = 'maxdb'
    supports_alter = True
    supports_unicode_statements = True
    max_identifier_length = 32
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = False
    preexecute_pk_sequences = True

    # MaxDB-specific: wire format for DATE/TIME/TIMESTAMP values, either
    # 'internal' (packed digit strings) or 'iso'.
    datetimeformat = 'internal'

    def __init__(self, _raise_known_sql_errors=False, **kw):
        super(MaxDBDialect, self).__init__(**kw)
        self._raise_known = _raise_known_sql_errors

        if self.dbapi is None:
            self.dbapi_type_map = {}
        else:
            # Driver-reported type names/classes -> result-processing types.
            self.dbapi_type_map = {
                'Long Binary': MaxBlob(),
                'Long byte_t': MaxBlob(),
                'Long Unicode': MaxText(),
                'Timestamp': MaxTimestamp(),
                'Date': MaxDate(),
                'Time': MaxTime(),
                datetime.datetime: MaxTimestamp(),
                datetime.date: MaxDate(),
                datetime.time: MaxTime(),
                }

    def dbapi(cls):
        # Imported lazily so the dialect module loads without the driver.
        from sapdb import dbapi as _dbapi
        return _dbapi
    dbapi = classmethod(dbapi)

    def create_connect_args(self, url):
        """Build driver connect kwargs from the SQLAlchemy URL."""
        opts = url.translate_connect_args(username='user')
        opts.update(url.query)
        return [], opts

    def type_descriptor(self, typeobj):
        """Adapt a generic type (class or instance) to its MaxDB variant."""
        if isinstance(typeobj, type):
            typeobj = typeobj()
        if isinstance(typeobj, sqltypes.Unicode):
            return typeobj.adapt(MaxUnicode)
        else:
            return sqltypes.adapt_type(typeobj, colspecs)

    def do_execute(self, cursor, statement, parameters, context=None):
        # The driver returns the affected rowcount from execute() itself.
        res = cursor.execute(statement, parameters)
        if isinstance(res, int) and context is not None:
            context._rowcount = res

    def do_release_savepoint(self, connection, name):
        # Does MaxDB truly support RELEASE SAVEPOINT <id>? All my attempts
        # produce "SUBTRANS COMMIT/ROLLBACK not allowed without SUBTRANS
        # BEGIN SQLSTATE: I7065"
        # Note that ROLLBACK TO works fine. In theory, a RELEASE should
        # just free up some transactional resources early, before the overall
        # COMMIT/ROLLBACK so omitting it should be relatively ok.
        pass

    def get_default_schema_name(self, connection):
        """Return CURRENT_SCHEMA, cached after the first round-trip."""
        try:
            return self._default_schema_name
        except AttributeError:
            name = self.identifier_preparer._normalize_name(
                connection.execute('SELECT CURRENT_SCHEMA FROM DUAL').scalar())
            self._default_schema_name = name
            return name

    def has_table(self, connection, table_name, schema=None):
        """Return True if the table exists in ``schema`` (or the default)."""
        denormalize = self.identifier_preparer._denormalize_name
        bind = [denormalize(table_name)]
        if schema is None:
            sql = ("SELECT tablename FROM TABLES "
                   "WHERE TABLES.TABLENAME=? AND"
                   "  TABLES.SCHEMANAME=CURRENT_SCHEMA ")
        else:
            sql = ("SELECT tablename FROM TABLES "
                   "WHERE TABLES.TABLENAME = ? AND"
                   "  TABLES.SCHEMANAME=? ")
            bind.append(denormalize(schema))

        rp = connection.execute(sql, bind)
        found = bool(rp.fetchone())
        rp.close()
        return found

    def table_names(self, connection, schema):
        """Return all (normalized) table names in ``schema``."""
        if schema is None:
            sql = (" SELECT TABLENAME FROM TABLES WHERE "
                   " SCHEMANAME=CURRENT_SCHEMA ")
            rs = connection.execute(sql)
        else:
            sql = (" SELECT TABLENAME FROM TABLES WHERE "
                   " SCHEMANAME=? ")
            matchname = self.identifier_preparer._denormalize_name(schema)
            rs = connection.execute(sql, matchname)
        normalize = self.identifier_preparer._normalize_name
        return [normalize(row[0]) for row in rs]

    def reflecttable(self, connection, table, include_columns):
        """Reflect columns and foreign keys for ``table`` from the system
        views COLUMNS and FOREIGNKEYCOLUMNS.

        Raises NoSuchTableError if no columns are found.
        """
        denormalize = self.identifier_preparer._denormalize_name
        normalize = self.identifier_preparer._normalize_name

        st = ('SELECT COLUMNNAME, MODE, DATATYPE, CODETYPE, LEN, DEC, '
              ' NULLABLE, "DEFAULT", DEFAULTFUNCTION '
              'FROM COLUMNS '
              'WHERE TABLENAME=? AND SCHEMANAME=%s '
              'ORDER BY POS')

        fk = ('SELECT COLUMNNAME, FKEYNAME, '
              ' REFSCHEMANAME, REFTABLENAME, REFCOLUMNNAME, RULE, '
              ' (CASE WHEN REFSCHEMANAME = CURRENT_SCHEMA '
              ' THEN 1 ELSE 0 END) AS in_schema '
              'FROM FOREIGNKEYCOLUMNS '
              'WHERE TABLENAME=? AND SCHEMANAME=%s '
              'ORDER BY FKEYNAME ')

        params = [denormalize(table.name)]
        if not table.schema:
            st = st % 'CURRENT_SCHEMA'
            fk = fk % 'CURRENT_SCHEMA'
        else:
            st = st % '?'
            fk = fk % '?'
            params.append(denormalize(table.schema))

        rows = connection.execute(st, params).fetchall()
        if not rows:
            raise exc.NoSuchTableError(table.fullname)

        include_columns = set(include_columns or [])

        for row in rows:
            (name, mode, col_type, encoding, length, scale,
             nullable, constant_def, func_def) = row

            name = normalize(name)

            if include_columns and name not in include_columns:
                continue

            type_args, type_kw = [], {}
            if col_type == 'FIXED':
                type_args = length, scale
                # Convert FIXED(10) DEFAULT SERIAL to our Integer
                if (scale == 0 and
                    func_def is not None and func_def.startswith('SERIAL')):
                    col_type = 'INTEGER'
                    type_args = length,
            # Bug fix: this previously read "col_type in 'FLOAT'", a
            # substring test that also matched '', 'F', 'LOA', etc.;
            # equality (as in the branches around it) is intended.
            elif col_type == 'FLOAT':
                type_args = length,
            elif col_type in ('CHAR', 'VARCHAR'):
                type_args = length,
                type_kw['encoding'] = encoding
            elif col_type == 'LONG':
                type_kw['encoding'] = encoding

            try:
                type_cls = ischema_names[col_type.lower()]
                type_instance = type_cls(*type_args, **type_kw)
            except KeyError:
                util.warn("Did not recognize type '%s' of column '%s'" %
                          (col_type, name))
                type_instance = sqltypes.NullType

            col_kw = {'autoincrement': False}
            col_kw['nullable'] = (nullable == 'YES')
            col_kw['primary_key'] = (mode == 'KEY')

            if func_def is not None:
                if func_def.startswith('SERIAL'):
                    if col_kw['primary_key']:
                        # No special default- let the standard autoincrement
                        # support handle SERIAL pk columns.
                        col_kw['autoincrement'] = True
                    else:
                        # strip current numbering
                        col_kw['server_default'] = schema.DefaultClause(
                            sql.text('SERIAL'))
                        col_kw['autoincrement'] = True
                else:
                    col_kw['server_default'] = schema.DefaultClause(
                        sql.text(func_def))
            elif constant_def is not None:
                # quote the constant, doubling any embedded quotes
                col_kw['server_default'] = schema.DefaultClause(sql.text(
                    "'%s'" % constant_def.replace("'", "''")))

            table.append_column(schema.Column(name, type_instance, **col_kw))

        # rows arrive ORDER BY FKEYNAME, which groupby() requires
        fk_sets = itertools.groupby(connection.execute(fk, params),
                                    lambda row: row.FKEYNAME)
        for fkeyname, fkey in fk_sets:
            fkey = list(fkey)
            if include_columns:
                key_cols = set([r.COLUMNNAME for r in fkey])
                if key_cols != include_columns:
                    continue

            columns, referants = [], []
            quote = self.identifier_preparer._maybe_quote_identifier

            for row in fkey:
                columns.append(normalize(row.COLUMNNAME))
                if table.schema or not row.in_schema:
                    referants.append('.'.join(
                        [quote(normalize(row[c]))
                         for c in ('REFSCHEMANAME', 'REFTABLENAME',
                                   'REFCOLUMNNAME')]))
                else:
                    referants.append('.'.join(
                        [quote(normalize(row[c]))
                         for c in ('REFTABLENAME', 'REFCOLUMNNAME')]))

            constraint_kw = {'name': fkeyname.lower()}
            if fkey[0].RULE is not None:
                rule = fkey[0].RULE
                if rule.startswith('DELETE '):
                    rule = rule[7:]
                constraint_kw['ondelete'] = rule

            table_kw = {}
            # NOTE(review): 'row' here is the last row from the loop above;
            # presumably all rows of one key share in_schema (they reference
            # a single table) -- confirm before relying on this.
            if table.schema or not row.in_schema:
                table_kw['schema'] = normalize(fkey[0].REFSCHEMANAME)

            ref_key = schema._get_table_key(normalize(fkey[0].REFTABLENAME),
                                            table_kw.get('schema'))
            if ref_key not in table.metadata.tables:
                # pre-load the referred table so the FK can resolve
                schema.Table(normalize(fkey[0].REFTABLENAME),
                             table.metadata,
                             autoload=True, autoload_with=connection,
                             **table_kw)

            constraint = schema.ForeignKeyConstraint(
                columns, referants, link_to_name=True, **constraint_kw)
            table.append_constraint(constraint)

    def has_sequence(self, connection, name):
        """Return True if a sequence named ``name`` exists."""
        # [ticket:726] makes this schema-aware.
        denormalize = self.identifier_preparer._denormalize_name
        sql = ("SELECT sequence_name FROM SEQUENCES "
               "WHERE SEQUENCE_NAME=? ")

        rp = connection.execute(sql, denormalize(name))
        found = bool(rp.fetchone())
        rp.close()
        return found
---|
718 | |
---|
719 | |
---|
class MaxDBCompiler(compiler.DefaultCompiler):
    # MOD is spelled as a function in MaxDB, not as an infix operator.
    operators = compiler.DefaultCompiler.operators.copy()
    operators[sql_operators.mod] = lambda x, y: 'mod(%s, %s)' % (x, y)

    # ANSI niladic date/time functions are rewritten to MaxDB's names.
    function_conversion = {
        'CURRENT_DATE': 'DATE',
        'CURRENT_TIME': 'TIME',
        'CURRENT_TIMESTAMP': 'TIMESTAMP',
        }

    # These functions must be written without parens when called with no
    # parameters. e.g. 'SELECT DATE FROM DUAL' not 'SELECT DATE() FROM DUAL'
    bare_functions = set([
        'CURRENT_SCHEMA', 'DATE', 'FALSE', 'SYSDBA', 'TIME', 'TIMESTAMP',
        'TIMEZONE', 'TRANSACTION', 'TRUE', 'USER', 'UID', 'USERGROUP',
        'UTCDATE', 'UTCDIFF'])
---|
736 | |
---|
    def default_from(self):
        # MaxDB requires a FROM clause; DUAL supplies one for bare SELECTs.
        return ' FROM DUAL'
---|
739 | |
---|
740 | def for_update_clause(self, select): |
---|
741 | clause = select.for_update |
---|
742 | if clause is True: |
---|
743 | return " WITH LOCK EXCLUSIVE" |
---|
744 | elif clause is None: |
---|
745 | return "" |
---|
746 | elif clause == "read": |
---|
747 | return " WITH LOCK" |
---|
748 | elif clause == "ignore": |
---|
749 | return " WITH LOCK (IGNORE) EXCLUSIVE" |
---|
750 | elif clause == "nowait": |
---|
751 | return " WITH LOCK (NOWAIT) EXCLUSIVE" |
---|
752 | elif isinstance(clause, basestring): |
---|
753 | return " WITH LOCK %s" % clause.upper() |
---|
754 | elif not clause: |
---|
755 | return "" |
---|
756 | else: |
---|
757 | return " WITH LOCK EXCLUSIVE" |
---|
758 | |
---|
759 | def apply_function_parens(self, func): |
---|
760 | if func.name.upper() in self.bare_functions: |
---|
761 | return len(func.clauses) > 0 |
---|
762 | else: |
---|
763 | return True |
---|
764 | |
---|
765 | def visit_function(self, fn, **kw): |
---|
766 | transform = self.function_conversion.get(fn.name.upper(), None) |
---|
767 | if transform: |
---|
768 | fn = fn._clone() |
---|
769 | fn.name = transform |
---|
770 | return super(MaxDBCompiler, self).visit_function(fn, **kw) |
---|
771 | |
---|
772 | def visit_cast(self, cast, **kwargs): |
---|
773 | # MaxDB only supports casts * to NUMERIC, * to VARCHAR or |
---|
774 | # date/time to VARCHAR. Casts of LONGs will fail. |
---|
775 | if isinstance(cast.type, (sqltypes.Integer, sqltypes.Numeric)): |
---|
776 | return "NUM(%s)" % self.process(cast.clause) |
---|
777 | elif isinstance(cast.type, sqltypes.String): |
---|
778 | return "CHR(%s)" % self.process(cast.clause) |
---|
779 | else: |
---|
780 | return self.process(cast.clause) |
---|
781 | |
---|
782 | def visit_sequence(self, sequence): |
---|
783 | if sequence.optional: |
---|
784 | return None |
---|
785 | else: |
---|
786 | return (self.dialect.identifier_preparer.format_sequence(sequence) + |
---|
787 | ".NEXTVAL") |
---|
788 | |
---|
    class ColumnSnagger(visitors.ClauseVisitor):
        # Records the Column referenced by a traversed expression; 'count'
        # lets callers reject expressions touching more than one column.
        def __init__(self):
            self.count = 0
            self.column = None
        def visit_column(self, column):
            self.column = column
            self.count += 1
---|
796 | |
---|
797 | def _find_labeled_columns(self, columns, use_labels=False): |
---|
798 | labels = {} |
---|
799 | for column in columns: |
---|
800 | if isinstance(column, basestring): |
---|
801 | continue |
---|
802 | snagger = self.ColumnSnagger() |
---|
803 | snagger.traverse(column) |
---|
804 | if snagger.count == 1: |
---|
805 | if isinstance(column, sql_expr._Label): |
---|
806 | labels[unicode(snagger.column)] = column.name |
---|
807 | elif use_labels: |
---|
808 | labels[unicode(snagger.column)] = column._label |
---|
809 | |
---|
810 | return labels |
---|
811 | |
---|
    def order_by_clause(self, select):
        order_by = self.process(select._order_by_clause)

        # ORDER BY clauses in DISTINCT queries must reference aliased
        # inner columns by alias name, not true column name.
        if order_by and getattr(select, '_distinct', False):
            labels = self._find_labeled_columns(select.inner_columns,
                                                select.use_labels)
            if labels:
                for needs_alias in labels.keys():
                    # swap the raw column reference for its alias while
                    # preserving the surrounding delimiters (groups 1 and 3)
                    r = re.compile(r'(^| )(%s)(,| |$)' %
                                   re.escape(needs_alias))
                    order_by = r.sub((r'\1%s\3' % labels[needs_alias]),
                                     order_by)

        # No ORDER BY in subqueries.
        if order_by:
            if self.is_subquery():
                # It's safe to simply drop the ORDER BY if there is no
                # LIMIT.  Right?  Other dialects seem to get away with
                # dropping order.
                if select._limit:
                    raise exc.InvalidRequestError(
                        "MaxDB does not support ORDER BY in subqueries")
                else:
                    return ""
            return " ORDER BY " + order_by
        else:
            return ""
---|
841 | |
---|
def get_select_precolumns(self, select):
    """Emit DISTINCT and, for subqueries, convert LIMIT into TOP.

    MaxDB subqueries cannot take a trailing LIMIT clause, so the limit
    is rendered as a TOP prefix here instead; combining it with an
    offset is unsupported and raises InvalidRequestError.
    """
    prefix = 'DISTINCT ' if select._distinct else ''
    if not (self.is_subquery() and select._limit):
        return prefix
    if select._offset:
        raise exc.InvalidRequestError(
            'MaxDB does not support LIMIT with an offset.')
    return prefix + ('TOP %s ' % select._limit)
---|
851 | |
---|
def limit_clause(self, select):
    """Return the LIMIT clause for a top-level SELECT.

    Subqueries get no LIMIT here -- get_select_precolumns() renders
    their limit as TOP instead.  The MaxDB docs claim LIMIT accepts an
    offset, but in practice it does not, so offsets are rejected.
    TODO: maybe emulate OFFSET with a ROWNO/ROWNUM predicate?

    Bug fix: previously, a select with neither limit nor offset fell
    through to the format string and emitted the literal "LIMIT None";
    now an absent limit renders nothing.
    """
    if self.is_subquery():
        # sub queries need TOP
        return ''
    if select._offset:
        raise exc.InvalidRequestError(
            'MaxDB does not support LIMIT with an offset.')
    if select._limit is None:
        return ''
    return ' \n LIMIT %s' % (select._limit,)
---|
863 | |
---|
def visit_insert(self, insert):
    """Compile an INSERT statement for MaxDB.

    Also decides whether post-insert SERIAL retrieval is safe: when any
    bound value is a SQL function the server may compute a value that
    cannot be predicted client-side, so ``_safeserial`` is cleared.
    """
    self.isinsert = True
    self._safeserial = True

    colparams = self._get_colparams(insert)
    if any(isinstance(value, sql_expr.Function)
           for value in (insert.parameters or {}).itervalues()):
        self._safeserial = False

    column_list = ', '.join([self.preparer.format_column(col)
                             for col, _ in colparams])
    value_list = ', '.join([val for _, val in colparams])
    return 'INSERT INTO %s (%s) VALUES (%s)' % (
        self.preparer.format_table(insert.table),
        column_list,
        value_list)
---|
882 | |
---|
883 | |
---|
class MaxDBDefaultRunner(engine_base.DefaultRunner):
    """Default-value runner that fetches sequence values via DUAL."""

    def visit_sequence(self, seq):
        # Optional sequences are defaulted elsewhere; emit no SQL.
        if seq.optional:
            return None
        name = self.dialect.identifier_preparer.format_sequence(seq)
        return self.execute_string("SELECT %s.NEXTVAL FROM DUAL" % (name))
---|
890 | |
---|
891 | |
---|
class MaxDBIdentifierPreparer(compiler.IdentifierPreparer):
    """Quoting and case-folding rules for MaxDB identifiers."""

    # MaxDB keywords and built-in function names that force quoting.
    reserved_words = set("""
        abs absolute acos adddate addtime all alpha alter any ascii asin
        atan atan2 avg binary bit boolean byte case ceil ceiling char
        character check chr column concat constraint cos cosh cot count
        cross curdate current curtime database date datediff day dayname
        dayofmonth dayofweek dayofyear dec decimal decode default degrees
        delete digits distinct double except exists exp expand first
        fixed float floor for from full get_objectname get_schema graphic
        greatest group having hex hextoraw hour ifnull ignore index
        initcap inner insert int integer internal intersect into join
        key last lcase least left length lfill list ln locate log log10
        long longfile lower lpad ltrim makedate maketime mapchar max
        mbcs microsecond min minute mod month monthname natural nchar
        next no noround not now null num numeric object of on order
        packed pi power prev primary radians real reject relative
        replace rfill right round rowid rowno rpad rtrim second select
        selupd serial set show sign sin sinh smallint some soundex space
        sqrt stamp statistics stddev subdate substr substring subtime
        sum sysdba table tan tanh time timediff timestamp timezone to
        toidentifier transaction translate trim trunc truncate ucase uid
        unicode union update upper user usergroup using utcdate utcdiff
        value values varchar vargraphic variance week weekofyear when
        where with year zoned
        """.split())

    def _normalize_name(self, name):
        """Fold an all-uppercase DB name to SQLAlchemy's lowercase form.

        Names requiring quotes (or of mixed case) pass through as-is.
        """
        if name is None:
            return None
        if name.isupper():
            lowered = name.lower()
            if not self._requires_quotes(lowered):
                return lowered
        return name

    def _denormalize_name(self, name):
        """Fold an all-lowercase SQLAlchemy name back to the DB's uppercase."""
        if name is None:
            return None
        if name.islower() and not self._requires_quotes(name):
            return name.upper()
        return name

    def _maybe_quote_identifier(self, name):
        # Quote only when the identifier actually requires it.
        if self._requires_quotes(name):
            return self.quote_identifier(name)
        return name
---|
948 | |
---|
949 | |
---|
class MaxDBSchemaGenerator(compiler.SchemaGenerator):
    def get_column_specification(self, column, **kw):
        """Build the DDL fragment for a single column.

        Renders name, type, nullability and default.  A column whose
        default is a non-optional Sequence gets no DDL default at all --
        that defaulting behavior is entirely client-side (and, as a
        consequence, non-reflectable).  An eligible autoincrementing
        integer primary key column is given DEFAULT SERIAL.
        """
        colspec = [self.preparer.format_column(column),
                   column.type.dialect_impl(self.dialect).get_col_spec()]

        if not column.nullable:
            colspec.append('NOT NULL')

        default = column.default
        default_str = self.get_column_default_string(column)

        # No DDL default for columns specified with non-optional sequence.
        if (default and isinstance(default, schema.Sequence) and
            not default.optional):
            pass
        # Regular default
        elif default_str is not None:
            colspec.append('DEFAULT %s' % default_str)
        # Assign DEFAULT SERIAL heuristically
        elif column.primary_key and column.autoincrement:
            # For SERIAL on a non-primary key member, use
            # DefaultClause(text('SERIAL'))
            try:
                first = [c for c in column.table.primary_key.columns
                         if (c.autoincrement and
                             (isinstance(c.type, sqltypes.Integer) or
                              (isinstance(c.type, MaxNumeric) and
                               c.type.precision)) and
                             not c.foreign_keys)].pop(0)
                if column is first:
                    colspec.append('DEFAULT SERIAL')
            except IndexError:
                # No eligible SERIAL column in the primary key.
                pass
        return ' '.join(colspec)

    def get_column_default_string(self, column):
        """Return the DDL text for a column's server default, or None.

        Bug fix: the original tested ``column.server_default`` but then
        read ``column.default.arg``, which fails (or renders the wrong
        value) whenever only a server_default is present.  All reads now
        go through ``column.server_default.arg``.
        """
        if isinstance(column.server_default, schema.DefaultClause):
            arg = column.server_default.arg
            if isinstance(arg, basestring):
                # Integer defaults render bare; everything else quoted.
                if isinstance(column.type, sqltypes.Integer):
                    return str(arg)
                else:
                    return "'%s'" % arg
            else:
                return unicode(self._compile(arg, None))
        else:
            return None

    def visit_sequence(self, sequence):
        """Creates a SEQUENCE.

        TODO: move to module doc?

        start
          With an integer value, set the START WITH option.

        increment
          An integer value to increment by.  Default is the database default.

        maxdb_minvalue
        maxdb_maxvalue
          With an integer value, sets the corresponding sequence option.

        maxdb_no_minvalue
        maxdb_no_maxvalue
          Defaults to False.  If true, sets the corresponding sequence option.

        maxdb_cycle
          Defaults to False.  If true, sets the CYCLE option.

        maxdb_cache
          With an integer value, sets the CACHE option.

        maxdb_no_cache
          Defaults to False.  If true, sets NOCACHE.
        """

        if (not sequence.optional and
            (not self.checkfirst or
             not self.dialect.has_sequence(self.connection, sequence.name))):

            ddl = ['CREATE SEQUENCE',
                   self.preparer.format_sequence(sequence)]

            # Bug fix: this previously did "sequence.increment = 1",
            # mutating the shared Sequence object and silently discarding
            # any user-specified increment.  Default locally instead.
            if sequence.increment is not None:
                increment = sequence.increment
            else:
                increment = 1
            ddl.extend(('INCREMENT BY', str(increment)))

            if sequence.start is not None:
                ddl.extend(('START WITH', str(sequence.start)))

            # Collect maxdb_* keyword options, stripping the prefix.
            opts = dict([(pair[0][6:].lower(), pair[1])
                         for pair in sequence.kwargs.items()
                         if pair[0].startswith('maxdb_')])

            if 'maxvalue' in opts:
                ddl.extend(('MAXVALUE', str(opts['maxvalue'])))
            elif opts.get('no_maxvalue', False):
                ddl.append('NOMAXVALUE')
            if 'minvalue' in opts:
                ddl.extend(('MINVALUE', str(opts['minvalue'])))
            elif opts.get('no_minvalue', False):
                ddl.append('NOMINVALUE')

            if opts.get('cycle', False):
                ddl.append('CYCLE')

            if 'cache' in opts:
                ddl.extend(('CACHE', str(opts['cache'])))
            elif opts.get('no_cache', False):
                ddl.append('NOCACHE')

            self.append(' '.join(ddl))
            self.execute()
---|
1066 | |
---|
1067 | |
---|
class MaxDBSchemaDropper(compiler.SchemaDropper):
    """Emits DROP DDL for MaxDB schema objects."""

    def visit_sequence(self, sequence):
        # Optional sequences were never created; honor checkfirst before
        # emitting the DROP.
        if sequence.optional:
            return
        if self.checkfirst and not self.dialect.has_sequence(
                self.connection, sequence.name):
            return
        self.append("DROP SEQUENCE %s" %
                    self.preparer.format_sequence(sequence))
        self.execute()
---|
1076 | |
---|
1077 | |
---|
def _autoserial_column(table):
    """Finds the effective DEFAULT SERIAL column of a Table, if any.

    Returns ``(index, column)`` within the primary key for the first
    autoincrementing integer/numeric column lacking an explicit
    server-side default (or carrying only an optional Sequence);
    otherwise ``(None, None)``.
    """
    for index, col in enumerate(table.primary_key.columns):
        if not col.autoincrement:
            continue
        if not isinstance(col.type, (sqltypes.Integer, sqltypes.Numeric)):
            continue
        if isinstance(col.default, schema.Sequence):
            # Only optional sequences leave SERIAL in effect.
            if col.default.optional:
                return index, col
        elif (col.default is None or
              (not isinstance(col.server_default, schema.DefaultClause))):
            return index, col

    return None, None
---|
1092 | |
---|
# Wire the MaxDB implementation classes into the dialect.  SQLAlchemy's
# engine machinery looks these attributes up on the dialect class.
dialect = MaxDBDialect
dialect.preparer = MaxDBIdentifierPreparer
dialect.statement_compiler = MaxDBCompiler
dialect.schemagenerator = MaxDBSchemaGenerator
dialect.schemadropper = MaxDBSchemaDropper
dialect.defaultrunner = MaxDBDefaultRunner
dialect.execution_ctx_cls = MaxDBExecutionContext
---|