| 1 | # sqlite.py |
|---|
| 2 | # Copyright (C) 2005, 2006, 2007, 2008, 2009 Michael Bayer mike_mp@zzzcomputing.com |
|---|
| 3 | # |
|---|
| 4 | # This module is part of SQLAlchemy and is released under |
|---|
| 5 | # the MIT License: http://www.opensource.org/licenses/mit-license.php |
|---|
| 6 | |
|---|
| 7 | """Support for the SQLite database. |
|---|
| 8 | |
|---|
| 9 | Driver |
|---|
| 10 | ------ |
|---|
| 11 | |
|---|
| 12 | When using Python 2.5 and above, the built in ``sqlite3`` driver is |
|---|
| 13 | already installed and no additional installation is needed. Otherwise, |
|---|
| 14 | the ``pysqlite2`` driver needs to be present. This is the same driver as |
|---|
| 15 | ``sqlite3``, just with a different name. |
|---|
| 16 | |
|---|
| 17 | The ``pysqlite2`` driver will be loaded first, and if not found, ``sqlite3`` |
|---|
| 18 | is loaded. This allows an explicitly installed pysqlite driver to take |
|---|
| 19 | precedence over the built in one. As with all dialects, a specific |
|---|
| 20 | DBAPI module may be provided to :func:`~sqlalchemy.create_engine()` to control |
|---|
| 21 | this explicitly:: |
|---|
| 22 | |
|---|
| 23 | from sqlite3 import dbapi2 as sqlite |
|---|
| 24 | e = create_engine('sqlite:///file.db', module=sqlite) |
|---|
| 25 | |
|---|
| 26 | Full documentation on pysqlite is available at: |
|---|
| 27 | `<http://www.initd.org/pub/software/pysqlite/doc/usage-guide.html>`_ |
|---|
| 28 | |
|---|
| 29 | Connect Strings |
|---|
| 30 | --------------- |
|---|
| 31 | |
|---|
| 32 | The file specification for the SQLite database is taken as the "database" portion of |
|---|
| 33 | the URL. Note that the format of a url is:: |
|---|
| 34 | |
|---|
| 35 | driver://user:pass@host/database |
|---|
| 36 | |
|---|
| 37 | This means that the actual filename to be used starts with the characters to the |
|---|
| 38 | **right** of the third slash. So connecting to a relative filepath looks like:: |
|---|
| 39 | |
|---|
| 40 | # relative path |
|---|
| 41 | e = create_engine('sqlite:///path/to/database.db') |
|---|
| 42 | |
|---|
| 43 | An absolute path, which is denoted by starting with a slash, means you need **four** |
|---|
| 44 | slashes:: |
|---|
| 45 | |
|---|
| 46 | # absolute path |
|---|
| 47 | e = create_engine('sqlite:////path/to/database.db') |
|---|
| 48 | |
|---|
| 49 | To use a Windows path, regular drive specifications and backslashes can be used. |
|---|
| 50 | Double backslashes are probably needed:: |
|---|
| 51 | |
|---|
| 52 | # absolute path on Windows |
|---|
| 53 | e = create_engine('sqlite:///C:\\\\path\\\\to\\\\database.db') |
|---|
| 54 | |
|---|
| 55 | The sqlite ``:memory:`` identifier is the default if no filepath is present. Specify |
|---|
| 56 | ``sqlite://`` and nothing else:: |
|---|
| 57 | |
|---|
| 58 | # in-memory database |
|---|
| 59 | e = create_engine('sqlite://') |
|---|
| 60 | |
|---|
| 61 | Threading Behavior |
|---|
| 62 | ------------------ |
|---|
| 63 | |
|---|
| 64 | Pysqlite connections do not support being moved between threads, unless |
|---|
| 65 | the ``check_same_thread`` Pysqlite flag is set to ``False``. In addition, |
|---|
| 66 | when using an in-memory SQLite database, the full database exists only within |
|---|
| 67 | the scope of a single connection. It is reported that an in-memory |
|---|
| 68 | database does not support being shared between threads regardless of the |
|---|
| 69 | ``check_same_thread`` flag - which means that a multithreaded |
|---|
| 70 | application **cannot** share data from a ``:memory:`` database across threads |
|---|
| 71 | unless access to the connection is limited to a single worker thread which communicates |
|---|
| 72 | through a queueing mechanism to concurrent threads. |
|---|
| 73 | |
|---|
| 74 | To provide a default which accommodates SQLite's default threading capabilities |
|---|
| 75 | somewhat reasonably, the SQLite dialect will specify that the :class:`~sqlalchemy.pool.SingletonThreadPool` |
|---|
| 76 | be used by default. This pool maintains a single SQLite connection per thread |
|---|
| 77 | that is held open up to a count of five concurrent threads. When more than five threads |
|---|
| 78 | are used, a cleanup mechanism will dispose of excess unused connections. |
|---|
| 79 | |
|---|
| 80 | Two optional pool implementations that may be appropriate for particular SQLite usage scenarios: |
|---|
| 81 | |
|---|
| 82 | * the :class:`sqlalchemy.pool.StaticPool` might be appropriate for a multithreaded |
|---|
| 83 | application using an in-memory database, assuming the threading issues inherent in |
|---|
| 84 | pysqlite are somehow accommodated for. This pool holds persistently onto a single connection |
|---|
| 85 | which is never closed, and is returned for all requests. |
|---|
| 86 | |
|---|
| 87 | * the :class:`sqlalchemy.pool.NullPool` might be appropriate for an application that |
|---|
| 88 | makes use of a file-based sqlite database. This pool disables any actual "pooling" |
|---|
| 89 | behavior, and simply opens and closes real connections corresponding to the :func:`connect()` |
|---|
| 90 | and :func:`close()` methods. SQLite can "connect" to a particular file with very high |
|---|
| 91 | efficiency, so this option may actually perform better without the extra overhead |
|---|
| 92 | of :class:`SingletonThreadPool`. NullPool will of course render a ``:memory:`` connection |
|---|
| 93 | useless since the database would be lost as soon as the connection is "returned" to the pool. |
|---|
| 94 | |
|---|
| 95 | Date and Time Types |
|---|
| 96 | ------------------- |
|---|
| 97 | |
|---|
| 98 | SQLite does not have built-in DATE, TIME, or DATETIME types, and pysqlite does not provide |
|---|
| 99 | out of the box functionality for translating values between Python `datetime` objects |
|---|
| 100 | and a SQLite-supported format. SQLAlchemy's own :class:`~sqlalchemy.types.DateTime` |
|---|
| 101 | and related types provide date formatting and parsing functionality when SQLite is used. |
|---|
| 102 | The implementation classes are :class:`SLDateTime`, :class:`SLDate` and :class:`SLTime`. |
|---|
| 103 | These types represent dates and times as ISO formatted strings, which also nicely |
|---|
| 104 | support ordering. There's no reliance on typical "libc" internals for these functions |
|---|
| 105 | so historical dates are fully supported. |
|---|
| 106 | |
|---|
| 107 | Unicode |
|---|
| 108 | ------- |
|---|
| 109 | |
|---|
| 110 | In contrast to SQLAlchemy's active handling of date and time types for pysqlite, pysqlite's |
|---|
| 111 | default behavior regarding Unicode is that all strings are returned as Python unicode objects |
|---|
| 112 | in all cases. So even if the :class:`~sqlalchemy.types.Unicode` type is |
|---|
| 113 | *not* used, you will still always receive unicode data back from a result set. It is |
|---|
| 114 | **strongly** recommended that you do use the :class:`~sqlalchemy.types.Unicode` type |
|---|
| 115 | to represent strings, since it will raise a warning if a non-unicode Python string is |
|---|
| 116 | passed from the user application. Mixing the usage of non-unicode objects with returned unicode objects can |
|---|
| 117 | quickly create confusion, particularly when using the ORM as internal data is not |
|---|
| 118 | always represented by an actual database result string. |
|---|
| 119 | |
|---|
| 120 | """ |
|---|
| 121 | |
|---|
| 122 | |
|---|
| 123 | import datetime, re, time |
|---|
| 124 | |
|---|
| 125 | from sqlalchemy import sql, schema, exc, pool, DefaultClause |
|---|
| 126 | from sqlalchemy.engine import default |
|---|
| 127 | import sqlalchemy.types as sqltypes |
|---|
| 128 | import sqlalchemy.util as util |
|---|
| 129 | from sqlalchemy.sql import compiler, functions as sql_functions |
|---|
| 130 | from types import NoneType |
|---|
| 131 | |
|---|
class SLNumeric(sqltypes.Numeric):
    """SQLite NUMERIC type.

    Values are bound as strings when ``asdecimal`` is in effect, so no
    floating-point precision is lost in transit; otherwise as floats.
    """

    def bind_processor(self, dialect):
        if self.asdecimal:
            converter = str
        else:
            converter = float

        def process(value):
            if value is None:
                return None
            return converter(value)
        return process

    def get_col_spec(self):
        """Render the DDL type, with precision/scale when specified."""
        if self.precision is None:
            return "NUMERIC"
        return "NUMERIC(%s, %s)" % (self.precision, self.scale)
|---|
| 147 | |
|---|
class SLFloat(sqltypes.Float):
    """SQLite FLOAT type.

    Like :class:`SLNumeric`, binds as ``str`` when ``asdecimal`` is set,
    otherwise as ``float``.
    """

    def bind_processor(self, dialect):
        if self.asdecimal:
            converter = str
        else:
            converter = float

        def process(value):
            if value is None:
                return None
            return converter(value)
        return process

    def get_col_spec(self):
        """Render the DDL type name."""
        return "FLOAT"
|---|
| 160 | |
|---|
class SLInteger(sqltypes.Integer):
    # SQLite integer column type; INTEGER is also SQLite's rowid alias.
    def get_col_spec(self):
        """Render the DDL type name for an integer column."""
        return "INTEGER"
|---|
| 164 | |
|---|
class SLSmallInteger(sqltypes.Smallinteger):
    # SQLite has no distinct small-integer storage class; SMALLINT is
    # accepted in DDL and treated with integer affinity.
    def get_col_spec(self):
        """Render the DDL type name for a small-integer column."""
        return "SMALLINT"
|---|
| 168 | |
|---|
| 169 | class DateTimeMixin(object): |
|---|
| 170 | def _bind_processor(self, format, elements): |
|---|
| 171 | def process(value): |
|---|
| 172 | if not isinstance(value, (NoneType, datetime.date, datetime.datetime, datetime.time)): |
|---|
| 173 | raise TypeError("SQLite Date, Time, and DateTime types only accept Python datetime objects as input.") |
|---|
| 174 | elif value is not None: |
|---|
| 175 | return format % tuple([getattr(value, attr, 0) for attr in elements]) |
|---|
| 176 | else: |
|---|
| 177 | return None |
|---|
| 178 | return process |
|---|
| 179 | |
|---|
| 180 | def _result_processor(self, fn, regexp): |
|---|
| 181 | def process(value): |
|---|
| 182 | if value is not None: |
|---|
| 183 | return fn(*[int(x or 0) for x in regexp.match(value).groups()]) |
|---|
| 184 | else: |
|---|
| 185 | return None |
|---|
| 186 | return process |
|---|
| 187 | |
|---|
class SLDateTime(DateTimeMixin, sqltypes.DateTime):
    """ISO-formatted TIMESTAMP type for SQLite.

    ``__legacy_microseconds__`` selects the pre-0.5 microsecond rendering
    (no zero-padding) for compatibility with older stored data.
    """

    __legacy_microseconds__ = False

    def get_col_spec(self):
        return "TIMESTAMP"

    def bind_processor(self, dialect):
        # Choose the rendering format once; the element tuple is shared.
        if self.__legacy_microseconds__:
            fmt = "%4.4d-%2.2d-%2.2d %2.2d:%2.2d:%2.2d.%s"
        else:
            fmt = "%4.4d-%2.2d-%2.2d %2.2d:%2.2d:%2.2d.%06d"
        return self._bind_processor(
            fmt,
            ("year", "month", "day", "hour", "minute", "second", "microsecond"))

    # date part is mandatory; time and fractional seconds are optional.
    _reg = re.compile(r"(\d+)-(\d+)-(\d+)(?: (\d+):(\d+):(\d+)(?:\.(\d+))?)?")

    def result_processor(self, dialect):
        return self._result_processor(datetime.datetime, self._reg)
|---|
| 209 | |
|---|
class SLDate(DateTimeMixin, sqltypes.Date):
    """ISO-formatted DATE type for SQLite."""

    def get_col_spec(self):
        return "DATE"

    def bind_processor(self, dialect):
        return self._bind_processor(
            "%4.4d-%2.2d-%2.2d",
            ("year", "month", "day"))

    _reg = re.compile(r"(\d+)-(\d+)-(\d+)")

    def result_processor(self, dialect):
        return self._result_processor(datetime.date, self._reg)
|---|
| 223 | |
|---|
class SLTime(DateTimeMixin, sqltypes.Time):
    """ISO-formatted TIME type for SQLite.

    ``__legacy_microseconds__`` selects the pre-0.5 microsecond rendering
    (no zero-padding) for compatibility with older stored data.
    """

    __legacy_microseconds__ = False

    def get_col_spec(self):
        return "TIME"

    def bind_processor(self, dialect):
        # Choose the rendering format once; the element tuple is shared.
        if self.__legacy_microseconds__:
            fmt = "%2.2d:%2.2d:%2.2d.%s"
        else:
            fmt = "%2.2d:%2.2d:%2.2d.%06d"
        return self._bind_processor(
            fmt,
            ("hour", "minute", "second", "microsecond"))

    # fractional seconds are optional on parse.
    _reg = re.compile(r"(\d+):(\d+):(\d+)(?:\.(\d+))?")

    def result_processor(self, dialect):
        return self._result_processor(datetime.time, self._reg)
|---|
| 245 | |
|---|
class SLUnicodeMixin(object):
    """Bind-side unicode assertion shared by the SQLite string types.

    pysqlite already returns unicode for all result strings, so no
    result-side conversion is needed; binding optionally warns on, or
    rejects, non-unicode values depending on ``assert_unicode``.
    """

    def bind_processor(self, dialect):
        # No conversion requested anywhere: nothing to do.
        if not (self.convert_unicode or dialect.convert_unicode):
            return None

        # Type-level setting wins; fall back to the dialect's.
        assert_unicode = self.assert_unicode
        if assert_unicode is None:
            assert_unicode = dialect.assert_unicode

        # pysqlite handles encoding itself, so without an assertion
        # there is still nothing to do at bind time.
        if not assert_unicode:
            return None

        def process(value):
            if isinstance(value, (unicode, NoneType)):
                return value
            if assert_unicode == 'warn':
                util.warn("Unicode type received non-unicode bind "
                          "param value %r" % value)
                return value
            raise exc.InvalidRequestError("Unicode type received non-unicode bind param value %r" % value)
        return process

    def result_processor(self, dialect):
        # pysqlite returns unicode natively; nothing to convert.
        return None
|---|
| 273 | |
|---|
class SLText(SLUnicodeMixin, sqltypes.Text):
    # Unbounded text column; unicode handling comes from SLUnicodeMixin.
    def get_col_spec(self):
        """Render the DDL type name for an unbounded text column."""
        return "TEXT"
|---|
| 277 | |
|---|
class SLString(SLUnicodeMixin, sqltypes.String):
    """VARCHAR type; unicode handling comes from SLUnicodeMixin."""

    def get_col_spec(self):
        # Length is optional in SQLite DDL.
        if self.length:
            return "VARCHAR(%d)" % self.length
        return "VARCHAR"
|---|
| 281 | |
|---|
class SLChar(SLUnicodeMixin, sqltypes.CHAR):
    """Fixed-width CHAR type; unicode handling comes from SLUnicodeMixin."""

    def get_col_spec(self):
        # Length is optional in SQLite DDL.
        if self.length:
            return "CHAR(%d)" % self.length
        return "CHAR"
|---|
| 285 | |
|---|
class SLBinary(sqltypes.Binary):
    # Binary data is stored in SQLite's BLOB storage class.
    def get_col_spec(self):
        """Render the DDL type name for a binary column."""
        return "BLOB"
|---|
| 289 | |
|---|
class SLBoolean(sqltypes.Boolean):
    """BOOLEAN type, stored as integer 1/0 in SQLite."""

    def get_col_spec(self):
        return "BOOLEAN"

    def bind_processor(self, dialect):
        # Python truth value -> 1/0; None passes through as NULL.
        def process(value):
            if value is None:
                return None
            if value:
                return 1
            return 0
        return process

    def result_processor(self, dialect):
        # Stored 1 -> True, any other non-NULL value -> False.
        def process(value):
            if value is None:
                return None
            return value == 1
        return process
|---|
| 307 | |
|---|
# Mapping of generic SQLAlchemy types to their SQLite-specific
# implementations above; consulted by SQLiteDialect.type_descriptor()
# via sqltypes.adapt_type().
colspecs = {
    sqltypes.Binary: SLBinary,
    sqltypes.Boolean: SLBoolean,
    sqltypes.CHAR: SLChar,
    sqltypes.Date: SLDate,
    sqltypes.DateTime: SLDateTime,
    sqltypes.Float: SLFloat,
    sqltypes.Integer: SLInteger,
    sqltypes.NCHAR: SLChar,
    sqltypes.Numeric: SLNumeric,
    sqltypes.Smallinteger: SLSmallInteger,
    sqltypes.String: SLString,
    sqltypes.Text: SLText,
    sqltypes.Time: SLTime,
}
|---|
| 323 | |
|---|
# Mapping of type names found in SQLite schema DDL (as reported by
# "PRAGMA table_info") to dialect type classes; used by reflecttable().
ischema_names = {
    'BLOB': SLBinary,
    'BOOL': SLBoolean,
    'BOOLEAN': SLBoolean,
    'CHAR': SLChar,
    'DATE': SLDate,
    'DATETIME': SLDateTime,
    'DECIMAL': SLNumeric,
    'FLOAT': SLFloat,
    'INT': SLInteger,
    'INTEGER': SLInteger,
    'NUMERIC': SLNumeric,
    'REAL': SLNumeric,
    'SMALLINT': SLSmallInteger,
    'TEXT': SLText,
    'TIME': SLTime,
    'TIMESTAMP': SLDateTime,
    'VARCHAR': SLString,
}
|---|
| 343 | |
|---|
class SQLiteExecutionContext(default.DefaultExecutionContext):
    """Execution context which fills in the last inserted primary key.

    SQLite has no sequences; for a single-row INSERT the generated rowid
    is available afterwards as ``cursor.lastrowid``.
    """

    def post_exec(self):
        # Only single-row INSERTs have a meaningful lastrowid;
        # executemany leaves it undefined.
        if self.compiled.isinsert and not self.executemany:
            # If no primary-key value was otherwise produced (or the list
            # is empty), substitute the cursor's lastrowid as the first
            # element of the last-inserted-ids list.
            if not len(self._last_inserted_ids) or self._last_inserted_ids[0] is None:
                self._last_inserted_ids = [self.cursor.lastrowid] + self._last_inserted_ids[1:]
|---|
| 349 | |
|---|
class SQLiteDialect(default.DefaultDialect):
    """SQLAlchemy dialect for SQLite via the pysqlite2 / sqlite3 DBAPI."""

    name = 'sqlite'
    # SQLite's ALTER TABLE cannot add constraints or drop columns.
    supports_alter = False
    supports_unicode_statements = True
    default_paramstyle = 'qmark'
    supports_default_values = True
    # "INSERT INTO t () VALUES ()" is not accepted by SQLite.
    supports_empty_insert = False

    def __init__(self, **kwargs):
        """Set version-dependent capability flags from the DBAPI module."""
        default.DefaultDialect.__init__(self, **kwargs)
        def vers(num):
            # "3.2.3" -> (3, 2, 3) for tuple comparison.
            return tuple([int(x) for x in num.split('.')])
        if self.dbapi is not None:
            sqlite_ver = self.dbapi.version_info
            if sqlite_ver < (2, 1, '3'):
                util.warn(
                    ("The installed version of pysqlite2 (%s) is out-dated "
                     "and will cause errors in some cases. Version 2.1.3 "
                     "or greater is recommended.") %
                    '.'.join([str(subver) for subver in sqlite_ver]))
            # "INSERT ... DEFAULT VALUES" appeared in SQLite 3.3.8.
            if self.dbapi.sqlite_version_info < (3, 3, 8):
                self.supports_default_values = False
        # CAST appeared in SQLite 3.2.3; assume support when no DBAPI
        # is present (e.g. for offline SQL generation).
        self.supports_cast = (self.dbapi is None or vers(self.dbapi.sqlite_version) >= vers("3.2.3"))

    def dbapi(cls):
        """Import pysqlite2 if available, else the 2.5+ stdlib sqlite3."""
        try:
            from pysqlite2 import dbapi2 as sqlite
        except ImportError, e:
            try:
                from sqlite3 import dbapi2 as sqlite #try the 2.5+ stdlib name.
            except ImportError:
                # Neither driver present: re-raise the pysqlite2 error.
                raise e
        return sqlite
    dbapi = classmethod(dbapi)

    def server_version_info(self, connection):
        # NOTE(review): reports the module-level library version; the
        # ``connection`` argument is unused.
        return self.dbapi.sqlite_version_info

    def create_connect_args(self, url):
        """Build ([filename], opts) for the DBAPI connect() call."""
        # SQLite URLs carry only a file path; any network-style component
        # indicates a malformed URL.
        if url.username or url.password or url.host or url.port:
            raise exc.ArgumentError(
                "Invalid SQLite URL: %s\n"
                "Valid SQLite URL forms are:\n"
                " sqlite:///:memory: (or, sqlite://)\n"
                " sqlite:///relative/path/to/file.db\n"
                " sqlite:////absolute/path/to/file.db" % (url,))
        filename = url.database or ':memory:'

        opts = url.query.copy()
        # Coerce query-string options to the types pysqlite expects.
        util.coerce_kw_type(opts, 'timeout', float)
        util.coerce_kw_type(opts, 'isolation_level', str)
        util.coerce_kw_type(opts, 'detect_types', int)
        util.coerce_kw_type(opts, 'check_same_thread', bool)
        util.coerce_kw_type(opts, 'cached_statements', int)

        return ([filename], opts)

    def type_descriptor(self, typeobj):
        # Swap generic types for the SQLite-specific classes in ``colspecs``.
        return sqltypes.adapt_type(typeobj, colspecs)

    def is_disconnect(self, e):
        # pysqlite raises ProgrammingError with this exact message when a
        # closed connection is used.
        return isinstance(e, self.dbapi.ProgrammingError) and "Cannot operate on a closed database." in str(e)

    def table_names(self, connection, schema):
        """Return table names, optionally from an attached ``schema``."""
        if schema is not None:
            qschema = self.identifier_preparer.quote_identifier(schema)
            master = '%s.sqlite_master' % qschema
            s = ("SELECT name FROM %s "
                 "WHERE type='table' ORDER BY name") % (master,)
            rs = connection.execute(s)
        else:
            try:
                # Include TEMP tables by unioning in sqlite_temp_master.
                s = ("SELECT name FROM "
                     " (SELECT * FROM sqlite_master UNION ALL "
                     " SELECT * FROM sqlite_temp_master) "
                     "WHERE type='table' ORDER BY name")
                rs = connection.execute(s)
            except exc.DBAPIError:
                # NOTE(review): the bare ``raise`` makes the fallback
                # query below unreachable dead code — confirm whether the
                # sqlite_master-only query was meant to run instead.
                raise
                s = ("SELECT name FROM sqlite_master "
                     "WHERE type='table' ORDER BY name")
                rs = connection.execute(s)

        return [row[0] for row in rs]

    def has_table(self, connection, table_name, schema=None):
        """Return True if the table exists, probed via PRAGMA table_info."""
        quote = self.identifier_preparer.quote_identifier
        if schema is not None:
            pragma = "PRAGMA %s." % quote(schema)
        else:
            pragma = "PRAGMA "
        qtable = quote(table_name)
        cursor = _pragma_cursor(connection.execute("%stable_info(%s)" % (pragma, qtable)))

        # Any row at all means the table exists.
        row = cursor.fetchone()

        # consume remaining rows, to work around
        # http://www.sqlite.org/cvstrac/tktview?tn=1884
        while cursor.fetchone() is not None:
            pass

        return (row is not None)

    def reflecttable(self, connection, table, include_columns):
        """Populate ``table`` with columns and foreign keys via PRAGMAs."""
        preparer = self.identifier_preparer
        if table.schema is None:
            pragma = "PRAGMA "
        else:
            pragma = "PRAGMA %s." % preparer.quote_identifier(table.schema)
        qtable = preparer.format_table(table, False)

        # --- column reflection via "PRAGMA table_info" ---
        c = _pragma_cursor(connection.execute("%stable_info(%s)" % (pragma, qtable)))
        found_table = False
        while True:
            row = c.fetchone()
            if row is None:
                break

            found_table = True
            # table_info row layout: (cid, name, type, notnull, dflt_value, pk)
            (name, type_, nullable, default, has_default, primary_key) = (row[1], row[2].upper(), not row[3], row[4], row[4] is not None, row[5])
            # strip quoting that SQLite preserves in the stored name
            name = re.sub(r'^\"|\"$', '', name)
            if include_columns and name not in include_columns:
                continue
            # split e.g. "VARCHAR(30)" into the type name and argument part
            match = re.match(r'(\w+)(\(.*?\))?', type_)
            if match:
                coltype = match.group(1)
                args = match.group(2)
            else:
                coltype = "VARCHAR"
                args = ''

            try:
                coltype = ischema_names[coltype]
            except KeyError:
                util.warn("Did not recognize type '%s' of column '%s'" %
                          (coltype, name))
                coltype = sqltypes.NullType

            if args is not None:
                # e.g. "(30)" or "(10, 2)" -> positional int arguments
                args = re.findall(r'(\d+)', args)
                coltype = coltype(*[int(a) for a in args])

            colargs = []
            if has_default:
                colargs.append(DefaultClause(sql.text(default)))
            table.append_column(schema.Column(name, coltype, primary_key = primary_key, nullable = nullable, *colargs))

        # PRAGMA table_info returns no rows for a missing table.
        if not found_table:
            raise exc.NoSuchTableError(table.name)

        # --- foreign key reflection via "PRAGMA foreign_key_list" ---
        c = _pragma_cursor(connection.execute("%sforeign_key_list(%s)" % (pragma, qtable)))
        fks = {}
        while True:
            row = c.fetchone()
            if row is None:
                break
            # foreign_key_list row: (id, seq, table, from, to, ...)
            (constraint_name, tablename, localcol, remotecol) = (row[0], row[2], row[3], row[4])
            tablename = re.sub(r'^\"|\"$', '', tablename)
            localcol = re.sub(r'^\"|\"$', '', localcol)
            remotecol = re.sub(r'^\"|\"$', '', remotecol)
            try:
                fk = fks[constraint_name]
            except KeyError:
                # (constrained columns, referenced "table.column" specs)
                fk = ([], [])
                fks[constraint_name] = fk

            # look up the table based on the given table's engine, not 'self',
            # since it could be a ProxyEngine
            remotetable = schema.Table(tablename, table.metadata, autoload=True, autoload_with=connection)
            constrained_column = table.c[localcol].name
            refspec = ".".join([tablename, remotecol])
            if constrained_column not in fk[0]:
                fk[0].append(constrained_column)
            if refspec not in fk[1]:
                fk[1].append(refspec)
        for name, value in fks.iteritems():
            table.append_constraint(schema.ForeignKeyConstraint(value[0], value[1], link_to_name=True))
        # check for UNIQUE indexes
        c = _pragma_cursor(connection.execute("%sindex_list(%s)" % (pragma, qtable)))
        unique_indexes = []
        while True:
            row = c.fetchone()
            if row is None:
                break
            # index_list row: (seq, name, unique)
            if (row[2] == 1):
                unique_indexes.append(row[1])
        # loop thru unique indexes for one that includes the primary key
        for idx in unique_indexes:
            c = connection.execute("%sindex_info(%s)" % (pragma, idx))
            cols = []
            while True:
                row = c.fetchone()
                if row is None:
                    break
                # NOTE(review): ``cols`` is collected but never used
                # afterwards — confirm whether the unique-index scan was
                # left unfinished intentionally.
                cols.append(row[2])
|---|
| 546 | def _pragma_cursor(cursor): |
|---|
| 547 | if cursor.closed: |
|---|
| 548 | cursor._fetchone_impl = lambda: None |
|---|
| 549 | return cursor |
|---|
| 550 | |
|---|
class SQLiteCompiler(compiler.DefaultCompiler):
    """Statement compiler with SQLite-specific SQL rendering."""

    functions = compiler.DefaultCompiler.functions.copy()
    functions.update({
        sql_functions.now: 'CURRENT_TIMESTAMP',
        sql_functions.char_length: 'length%(expr)s'
    })

    # EXTRACT fields -> strftime() format codes.
    extract_map = compiler.DefaultCompiler.extract_map.copy()
    extract_map.update({
        'month': '%m',
        'day': '%d',
        'year': '%Y',
        'second': '%S',
        'hour': '%H',
        'doy': '%j',
        'minute': '%M',
        'epoch': '%s',
        'dow': '%w',
        'week': '%W'
    })

    def visit_cast(self, cast, **kwargs):
        # Pre-3.2.3 SQLite has no CAST; emit the bare expression instead.
        if not self.dialect.supports_cast:
            return self.process(cast.clause)
        return super(SQLiteCompiler, self).visit_cast(cast)

    def visit_extract(self, extract):
        # EXTRACT is rendered via strftime(), cast back to an integer.
        try:
            fmt = self.extract_map[extract.field]
        except KeyError:
            raise exc.ArgumentError(
                "%s is not a valid extract argument." % extract.field)
        return "CAST(STRFTIME('%s', %s) AS INTEGER)" % (
            fmt, self.process(extract.expr))

    def limit_clause(self, select):
        parts = []
        if select._limit is not None:
            parts.append(" \n LIMIT " + str(select._limit))
        if select._offset is not None:
            # SQLite requires a LIMIT in order to use OFFSET; -1 means
            # "no limit".
            if select._limit is None:
                parts.append(" \n LIMIT -1")
            parts.append(" OFFSET " + str(select._offset))
        else:
            parts.append(" OFFSET 0")
        return "".join(parts)

    def for_update_clause(self, select):
        # sqlite has no "FOR UPDATE" AFAICT
        return ''
|---|
| 603 | |
|---|
| 604 | |
|---|
class SQLiteSchemaGenerator(compiler.SchemaGenerator):
    """DDL generator producing SQLite column specifications."""

    def get_column_specification(self, column, **kwargs):
        # "<name> <TYPE>" from the dialect-specific type implementation.
        impl = column.type.dialect_impl(self.dialect)
        spec = "%s %s" % (self.preparer.format_column(column), impl.get_col_spec())

        default = self.get_column_default_string(column)
        if default is not None:
            spec = spec + " DEFAULT " + default

        if not column.nullable:
            spec = spec + " NOT NULL"
        return spec
|---|
| 616 | |
|---|
class SQLiteIdentifierPreparer(compiler.IdentifierPreparer):
    """Identifier preparer carrying SQLite's reserved-word list."""

    # Words which must be quoted when used as identifiers.
    reserved_words = set([
        'add', 'after', 'all', 'alter', 'analyze', 'and', 'as', 'asc',
        'attach', 'autoincrement', 'before', 'begin', 'between', 'by',
        'cascade', 'case', 'cast', 'check', 'collate', 'column', 'commit',
        'conflict', 'constraint', 'create', 'cross', 'current_date',
        'current_time', 'current_timestamp', 'database', 'default',
        'deferrable', 'deferred', 'delete', 'desc', 'detach', 'distinct',
        'drop', 'each', 'else', 'end', 'escape', 'except', 'exclusive',
        'explain', 'false', 'fail', 'for', 'foreign', 'from', 'full', 'glob',
        'group', 'having', 'if', 'ignore', 'immediate', 'in', 'index',
        'initially', 'inner', 'insert', 'instead', 'intersect', 'into', 'is',
        'isnull', 'join', 'key', 'left', 'like', 'limit', 'match', 'natural',
        'not', 'notnull', 'null', 'of', 'offset', 'on', 'or', 'order', 'outer',
        'plan', 'pragma', 'primary', 'query', 'raise', 'references',
        'reindex', 'rename', 'replace', 'restrict', 'right', 'rollback',
        'row', 'select', 'set', 'table', 'temp', 'temporary', 'then', 'to',
        'transaction', 'trigger', 'true', 'union', 'unique', 'update', 'using',
        'vacuum', 'values', 'view', 'virtual', 'when', 'where', 'indexed',
    ])

    def __init__(self, dialect):
        # No SQLite-specific quoting options; defer entirely to the base.
        super(SQLiteIdentifierPreparer, self).__init__(dialect)
|---|
| 640 | |
|---|
# Assemble the dialect: attach the compiler, DDL generator, identifier
# preparer and execution-context classes.  SingletonThreadPool is the
# default pool class since pysqlite connections cannot be moved between
# threads (see the module docstring).
dialect = SQLiteDialect
dialect.poolclass = pool.SingletonThreadPool
dialect.statement_compiler = SQLiteCompiler
dialect.schemagenerator = SQLiteSchemaGenerator
dialect.preparer = SQLiteIdentifierPreparer
dialect.execution_ctx_cls = SQLiteExecutionContext
|---|