Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1# sql/sqltypes.py 

2# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors 

3# <see AUTHORS file> 

4# 

5# This module is part of SQLAlchemy and is released under 

6# the MIT License: http://www.opensource.org/licenses/mit-license.php 

7 

8"""SQL specific types. 

9 

10""" 

11 

12import codecs 

13import datetime as dt 

14import decimal 

15import json 

16 

17from . import elements 

18from . import operators 

19from . import type_api 

20from .base import _bind_or_error 

21from .base import NO_ARG 

22from .base import SchemaEventTarget 

23from .elements import _defer_name 

24from .elements import _literal_as_binds 

25from .elements import quoted_name 

26from .elements import Slice 

27from .elements import TypeCoerce as type_coerce # noqa 

28from .type_api import Emulated 

29from .type_api import NativeForEmulated # noqa 

30from .type_api import to_instance 

31from .type_api import TypeDecorator 

32from .type_api import TypeEngine 

33from .type_api import Variant 

34from .. import event 

35from .. import exc 

36from .. import inspection 

37from .. import processors 

38from .. import util 

39from ..util import compat 

40from ..util import pickle 

41 

42 

43if util.jython: 

44 import array 

45 

46 

class _LookupExpressionAdapter(object):

    """Mixin expression adaptations based on lookup tables.

    These rules are currently used by the numeric, integer and date types
    which have detailed cross-expression coercion rules.

    """

    @property
    def _expression_adaptations(self):
        # Hosting types override this with a mapping of the form
        # {operator: {right-hand type affinity: result type}}.
        raise NotImplementedError()

    class Comparator(TypeEngine.Comparator):
        # shared immutable empty mapping used as the .get() fallback so a
        # fresh dict is not allocated on every adaptation
        _blank_dict = util.immutabledict()

        def _adapt_expression(self, op, other_comparator):
            # Resolve the result type of "self.type <op> other" by looking
            # up the operator, then the right-hand operand's type affinity;
            # fall back to this comparator's own type when no rule exists.
            othertype = other_comparator.type._type_affinity
            lookup = self.type._expression_adaptations.get(
                op, self._blank_dict
            ).get(othertype, self.type)
            if lookup is othertype:
                # the table echoed the right-hand affinity class: use the
                # right-hand operand's actual type instance
                return (op, other_comparator.type)
            elif lookup is self.type._type_affinity:
                # the result stays as the left-hand (this) type instance
                return (op, self.type)
            else:
                # a distinct result type class; coerce it to an instance
                return (op, to_instance(lookup))

    comparator_factory = Comparator

76 

77 

class Concatenable(object):

    """A mixin marking a type as supporting 'concatenation',
    typically strings."""

    class Comparator(TypeEngine.Comparator):
        def _adapt_expression(self, op, other_comparator):
            # "+" between two concatenable (or untyped) operands becomes
            # the SQL concatenation operator; anything else is delegated.
            concatenates = op is operators.add and isinstance(
                other_comparator,
                (Concatenable.Comparator, NullType.Comparator),
            )
            if not concatenates:
                return super(Concatenable.Comparator, self)._adapt_expression(
                    op, other_comparator
                )
            return operators.concat_op, self.expr.type

    comparator_factory = Comparator

96 

97 

class Indexable(object):
    """A mixin that marks a type as supporting indexing operations,
    such as array or JSON structures.


    .. versionadded:: 1.1.0


    """

    class Comparator(TypeEngine.Comparator):
        def _setup_getitem(self, index):
            # Subclasses return a three-tuple:
            # (operator, adjusted right-hand expression, result type).
            raise NotImplementedError()

        def __getitem__(self, index):
            op, adjusted_index, result_type = self._setup_getitem(index)
            return self.operate(op, adjusted_index, result_type=result_type)

    comparator_factory = Comparator

123 

124 

class String(Concatenable, TypeEngine):

    """The base for all string and character types.

    In SQL, corresponds to VARCHAR. Can also take Python unicode objects
    and encode to the database's encoding in bind params (and the reverse for
    result sets.)

    The `length` field is usually required when the `String` type is
    used within a CREATE TABLE statement, as VARCHAR requires a length
    on most databases.

    """

    __visit_name__ = "string"

    @util.deprecated_params(
        convert_unicode=(
            "1.3",
            "The :paramref:`.String.convert_unicode` parameter is deprecated "
            "and will be removed in a future release. All modern DBAPIs "
            "now support Python Unicode directly and this parameter is "
            "unnecessary.",
        ),
        unicode_error=(
            "1.3",
            "The :paramref:`.String.unicode_errors` parameter is deprecated "
            "and will be removed in a future release. This parameter is "
            "unnecessary for modern Python DBAPIs and degrades performance "
            "significantly.",
        ),
    )
    def __init__(
        self,
        length=None,
        collation=None,
        convert_unicode=False,
        unicode_error=None,
        _warn_on_bytestring=False,
        _expect_unicode=False,
    ):
        """
        Create a string-holding type.

        :param length: optional, a length for the column for use in
          DDL and CAST expressions.  May be safely omitted if no ``CREATE
          TABLE`` will be issued.  Certain databases may require a
          ``length`` for use in DDL, and will raise an exception when
          the ``CREATE TABLE`` DDL is issued if a ``VARCHAR``
          with no length is included.  Whether the value is
          interpreted as bytes or characters is database specific.

        :param collation: Optional, a column-level collation for
          use in DDL and CAST expressions.  Renders using the
          COLLATE keyword supported by SQLite, MySQL, and PostgreSQL.
          E.g.::

            >>> from sqlalchemy import cast, select, String
            >>> print(select([cast('some string', String(collation='utf8'))]))
            SELECT CAST(:param_1 AS VARCHAR COLLATE utf8) AS anon_1

        :param convert_unicode: When set to ``True``, the
          :class:`.String` type will assume that
          input is to be passed as Python Unicode objects under Python 2,
          and results returned as Python Unicode objects.
          In the rare circumstance that the DBAPI does not support
          Python unicode under Python 2, SQLAlchemy will use its own
          encoder/decoder functionality on strings, referring to the
          value of the :paramref:`_sa.create_engine.encoding` parameter
          parameter passed to :func:`_sa.create_engine` as the encoding.

          For the extremely rare case that Python Unicode
          is to be encoded/decoded by SQLAlchemy on a backend
          that *does* natively support Python Unicode,
          the string value ``"force"`` can be passed here which will
          cause SQLAlchemy's encode/decode services to be
          used unconditionally.

          .. note::

            SQLAlchemy's unicode-conversion flags and features only apply
            to Python 2; in Python 3, all string objects are Unicode objects.
            For this reason, as well as the fact that virtually all modern
            DBAPIs now support Unicode natively even under Python 2,
            the :paramref:`.String.convert_unicode` flag is inherently a
            legacy feature.

          .. note::

            In the vast majority of cases, the :class:`.Unicode` or
            :class:`.UnicodeText` datatypes should be used for a
            :class:`_schema.Column` that expects to store non-ascii data.
            These
            datatypes will ensure that the correct types are used on the
            database side as well as set up the correct Unicode behaviors
            under Python 2.

          .. seealso::

            :paramref:`_sa.create_engine.convert_unicode` -
            :class:`_engine.Engine`-wide parameter

        :param unicode_error: Optional, a method to use to handle Unicode
          conversion errors. Behaves like the ``errors`` keyword argument to
          the standard library's ``string.decode()`` functions, requires
          that :paramref:`.String.convert_unicode` is set to
          ``"force"``

        """
        # unicode_error is only consulted by SQLAlchemy's own codec path,
        # which is selected via convert_unicode="force" (see
        # bind_processor below), hence the restriction here.
        if unicode_error is not None and convert_unicode != "force":
            raise exc.ArgumentError(
                "convert_unicode must be 'force' " "when unicode_error is set."
            )

        self.length = length
        self.collation = collation
        # either the public convert_unicode flag or the internal
        # _expect_unicode flag (set by Unicode / UnicodeText) enables
        # unicode handling
        self._expect_unicode = convert_unicode or _expect_unicode
        self._expect_unicode_error = unicode_error

        self._warn_on_bytestring = _warn_on_bytestring

    def literal_processor(self, dialect):
        # Render a value as an inline SQL string literal.
        def process(value):
            # escape embedded single quotes per SQL literal rules
            value = value.replace("'", "''")

            if dialect.identifier_preparer._double_percents:
                # %-style paramstyles need literal percent signs doubled
                value = value.replace("%", "%%")

            return "'%s'" % value

        return process

    def bind_processor(self, dialect):
        # Returns a bind-parameter conversion function, or None when the
        # DBAPI can receive the value as-is.
        if self._expect_unicode or dialect.convert_unicode:
            if (
                dialect.supports_unicode_binds
                and self._expect_unicode != "force"
            ):
                # DBAPI accepts unicode directly; at most emit a warning
                # when a bytestring is passed instead
                if self._warn_on_bytestring:

                    def process(value):
                        if isinstance(value, util.binary_type):
                            util.warn_limited(
                                "Unicode type received non-unicode "
                                "bind param value %r.",
                                (util.ellipses_string(value),),
                            )
                        return value

                    return process
                else:
                    return None
            else:
                # SQLAlchemy performs the encoding itself using the
                # engine-configured encoding
                encoder = codecs.getencoder(dialect.encoding)
                warn_on_bytestring = self._warn_on_bytestring

                def process(value):
                    if isinstance(value, util.text_type):
                        return encoder(value, self._expect_unicode_error)[0]
                    elif warn_on_bytestring and value is not None:
                        util.warn_limited(
                            "Unicode type received non-unicode bind "
                            "param value %r.",
                            (util.ellipses_string(value),),
                        )
                    return value

                return process
        else:
            return None

    def result_processor(self, dialect, coltype):
        # Returns a row-value conversion function, or None when rows come
        # back in the desired form already.
        wants_unicode = self._expect_unicode or dialect.convert_unicode
        needs_convert = wants_unicode and (
            dialect.returns_unicode_strings is not True
            or self._expect_unicode in ("force", "force_nocheck")
        )
        needs_isinstance = (
            needs_convert
            and dialect.returns_unicode_strings
            and self._expect_unicode != "force_nocheck"
        )
        if needs_convert:
            if needs_isinstance:
                # rows may contain either str or unicode values; check
                # each value before decoding
                return processors.to_conditional_unicode_processor_factory(
                    dialect.encoding, self._expect_unicode_error
                )
            else:
                return processors.to_unicode_processor_factory(
                    dialect.encoding, self._expect_unicode_error
                )
        else:
            return None

    @property
    def python_type(self):
        if self._expect_unicode:
            return util.text_type
        else:
            return str

    def get_dbapi_type(self, dbapi):
        return dbapi.STRING

    @classmethod
    def _warn_deprecated_unicode(cls):
        util.warn_deprecated(
            "The convert_unicode on Engine and String as well as the "
            "unicode_error flag on String are deprecated. All modern "
            "DBAPIs now support Python Unicode natively under Python 2, and "
            "under Python 3 all strings are inherently Unicode. These flags "
            "will be removed in a future release."
        )

338 

339 

class Text(String):

    """A variably sized string type.

    In SQL, usually corresponds to CLOB or TEXT.  Unlike
    :class:`.String`, a TEXT type generally carries no length; some
    databases accept a length argument here, while others will reject
    it.  Python unicode objects are encoded to the database's encoding
    for bind parameters (and decoded again for result sets) exactly as
    with :class:`.String`.

    """

    __visit_name__ = "text"

353 

354 

class Unicode(String):

    """A variable length Unicode string type.

    :class:`.Unicode` is a :class:`.String` subclass that assumes input
    and output as Python ``unicode`` data, and in that regard is
    equivalent to using the ``convert_unicode`` flag with
    :class:`.String`.  Unlike plain :class:`.String`, however, it also
    implies an underlying column type that explicitly supports non-ASCII
    data, such as ``NVARCHAR`` on Oracle and SQL Server.  This can
    affect the output of ``CREATE TABLE`` statements and ``CAST``
    functions at the dialect level, and can also affect bound-parameter
    handling in some specific DBAPI scenarios.

    The encoding used by the :class:`.Unicode` type is usually
    determined by the DBAPI itself; most modern DBAPIs accept Python
    ``unicode`` objects as bound values and result set values, with the
    encoding configured as detailed in the notes for the target DBAPI in
    the :ref:`dialect_toplevel` section.  For DBAPIs which do not
    support, or are not configured to accommodate, Python ``unicode``
    objects directly, SQLAlchemy performs the encode/decode step itself,
    using the ``encoding`` flag passed to :func:`_sa.create_engine`.

    Only Python ``unicode`` objects should be passed to this type, never
    plain ``str``; a plain ``str`` under Python 2 causes a warning to be
    emitted.  If such warnings appear and their source is unclear, the
    Python ``warnings`` filter, documented at
    http://docs.python.org/library/warnings.html, can turn them into
    exceptions that show a stack trace::

        import warnings
        warnings.simplefilter('error')

    An application that wishes to pass plain bytestrings and Python
    ``unicode`` objects to the ``Unicode`` type equally must first
    decode the bytestrings to unicode; the recipe at
    :ref:`coerce_to_unicode` illustrates how.

    .. seealso::

        :class:`.UnicodeText` - unlengthed textual counterpart
        to :class:`.Unicode`.

    """

    __visit_name__ = "unicode"

    def __init__(self, length=None, **kwargs):
        """
        Create a :class:`.Unicode` object.

        Parameters are the same as that of :class:`.String`,
        with the exception that ``convert_unicode``
        defaults to ``True``.

        """
        # enable unicode handling and bytestring warnings unless the
        # caller explicitly opted out
        for default_flag in ("_expect_unicode", "_warn_on_bytestring"):
            kwargs.setdefault(default_flag, True)
        super(Unicode, self).__init__(length=length, **kwargs)

425 

426 

class UnicodeText(Text):

    """An unbounded-length Unicode string type.

    Refer to :class:`.Unicode` for details on the unicode behavior of
    this object.

    As with :class:`.Unicode`, use of :class:`.UnicodeText` implies a
    unicode-capable type on the backend, such as ``NCLOB`` or ``NTEXT``.

    """

    __visit_name__ = "unicode_text"

    def __init__(self, length=None, **kwargs):
        """
        Create a Unicode-converting Text type.

        Parameters are the same as that of :class:`_expression.TextClause`,
        with the exception that ``convert_unicode``
        defaults to ``True``.

        """
        # enable unicode handling and bytestring warnings unless the
        # caller explicitly opted out
        for default_flag in ("_expect_unicode", "_warn_on_bytestring"):
            kwargs.setdefault(default_flag, True)
        super(UnicodeText, self).__init__(length=length, **kwargs)

    def _warn_deprecated_unicode(self):
        # unicode conversion is this type's documented contract, so the
        # String-level deprecation warning is suppressed
        pass

457 

458 

class Integer(_LookupExpressionAdapter, TypeEngine):

    """A type for ``int`` integers."""

    __visit_name__ = "integer"

    def get_dbapi_type(self, dbapi):
        return dbapi.NUMBER

    @property
    def python_type(self):
        return int

    def literal_processor(self, dialect):
        # integers render inline simply as their string form
        def render(value):
            return str(value)

        return render

    @util.memoized_property
    def _expression_adaptations(self):
        # TODO: need a dictionary object that will
        # handle operators generically here, this is incomplete
        this_type = self.__class__
        int_or_numeric = {Integer: this_type, Numeric: Numeric}
        return {
            operators.add: {
                Date: Date,
                Integer: this_type,
                Numeric: Numeric,
            },
            operators.mul: {
                Interval: Interval,
                Integer: this_type,
                Numeric: Numeric,
            },
            operators.div: dict(int_or_numeric),
            operators.truediv: dict(int_or_numeric),
            operators.sub: dict(int_or_numeric),
        }

497 

498 

class SmallInteger(Integer):

    """A type for smaller ``int`` integers.

    Renders a ``SMALLINT`` in DDL; on the Python side it behaves
    exactly like a plain :class:`.Integer`.

    """

    __visit_name__ = "small_integer"

509 

510 

class BigInteger(Integer):

    """A type for bigger ``int`` integers.

    Renders a ``BIGINT`` in DDL; on the Python side it behaves
    exactly like a plain :class:`.Integer`.

    """

    __visit_name__ = "big_integer"

521 

522 

class Numeric(_LookupExpressionAdapter, TypeEngine):

    """A type for fixed precision numbers, such as ``NUMERIC`` or ``DECIMAL``.

    This type returns Python ``decimal.Decimal`` objects by default, unless
    the :paramref:`.Numeric.asdecimal` flag is set to False, in which case
    they are coerced to Python ``float`` objects.

    .. note::

       The :class:`.Numeric` type is designed to receive data from a database
       type that is explicitly known to be a decimal type
       (e.g. ``DECIMAL``, ``NUMERIC``, others) and not a floating point
       type (e.g. ``FLOAT``, ``REAL``, others).
       If the database column on the server is in fact a floating-point type
       type, such as ``FLOAT`` or ``REAL``, use the :class:`.Float`
       type or a subclass, otherwise numeric coercion between
       ``float``/``Decimal`` may or may not function as expected.

    .. note::

       The Python ``decimal.Decimal`` class is generally slow
       performing; cPython 3.3 has now switched to use the `cdecimal
       <http://pypi.python.org/pypi/cdecimal/>`_ library natively. For
       older Python versions, the ``cdecimal`` library can be patched
       into any application where it will replace the ``decimal``
       library fully, however this needs to be applied globally and
       before any other modules have been imported, as follows::

           import sys
           import cdecimal
           sys.modules["decimal"] = cdecimal

       Note that the ``cdecimal`` and ``decimal`` libraries are **not
       compatible with each other**, so patching ``cdecimal`` at the
       global level is the only way it can be used effectively with
       various DBAPIs that hardcode to import the ``decimal`` library.

    """

    __visit_name__ = "numeric"

    # number of decimal places used for float->Decimal conversion when
    # neither decimal_return_scale nor .scale is set
    _default_decimal_return_scale = 10

    def __init__(
        self,
        precision=None,
        scale=None,
        decimal_return_scale=None,
        asdecimal=True,
    ):
        """
        Construct a Numeric.

        :param precision: the numeric precision for use in DDL ``CREATE
          TABLE``.

        :param scale: the numeric scale for use in DDL ``CREATE TABLE``.

        :param asdecimal: default True. Return whether or not
          values should be sent as Python Decimal objects, or
          as floats. Different DBAPIs send one or the other based on
          datatypes - the Numeric type will ensure that return values
          are one or the other across DBAPIs consistently.

        :param decimal_return_scale: Default scale to use when converting
          from floats to Python decimals. Floating point values will typically
          be much longer due to decimal inaccuracy, and most floating point
          database types don't have a notion of "scale", so by default the
          float type looks for the first ten decimal places when converting.
          Specifying this value will override that length. Types which
          do include an explicit ".scale" value, such as the base
          :class:`.Numeric` as well as the MySQL float types, will use the
          value of ".scale" as the default for decimal_return_scale, if not
          otherwise specified.

          .. versionadded:: 0.9.0

        When using the ``Numeric`` type, care should be taken to ensure
        that the asdecimal setting is appropriate for the DBAPI in use -
        when Numeric applies a conversion from Decimal->float or float->
        Decimal, this conversion incurs an additional performance overhead
        for all result columns received.

        DBAPIs that return Decimal natively (e.g. psycopg2) will have
        better accuracy and higher performance with a setting of ``True``,
        as the native translation to Decimal reduces the amount of floating-
        point issues at play, and the Numeric type itself doesn't need
        to apply any further conversions. However, another DBAPI which
        returns floats natively *will* incur an additional conversion
        overhead, and is still subject to floating point data loss - in
        which case ``asdecimal=False`` will at least remove the extra
        conversion overhead.

        """
        self.precision = precision
        self.scale = scale
        self.decimal_return_scale = decimal_return_scale
        self.asdecimal = asdecimal

    @property
    def _effective_decimal_return_scale(self):
        # precedence: explicit decimal_return_scale, then this type's own
        # .scale attribute if present and set, then the class default
        if self.decimal_return_scale is not None:
            return self.decimal_return_scale
        elif getattr(self, "scale", None) is not None:
            return self.scale
        else:
            return self._default_decimal_return_scale

    def get_dbapi_type(self, dbapi):
        return dbapi.NUMBER

    def literal_processor(self, dialect):
        # numbers render inline as their plain string form
        def process(value):
            return str(value)

        return process

    @property
    def python_type(self):
        if self.asdecimal:
            return decimal.Decimal
        else:
            return float

    def bind_processor(self, dialect):
        if dialect.supports_native_decimal:
            # DBAPI accepts Decimal objects directly
            return None
        else:
            # DBAPI cannot accept Decimal; coerce bound values to float
            return processors.to_float

    def result_processor(self, dialect, coltype):
        if self.asdecimal:
            if dialect.supports_native_decimal:
                # we're a "numeric", DBAPI will give us Decimal directly
                return None
            else:
                util.warn(
                    "Dialect %s+%s does *not* support Decimal "
                    "objects natively, and SQLAlchemy must "
                    "convert from floating point - rounding "
                    "errors and other issues may occur. Please "
                    "consider storing Decimal numbers as strings "
                    "or integers on this platform for lossless "
                    "storage." % (dialect.name, dialect.driver)
                )

                # we're a "numeric", DBAPI returns floats, convert.
                return processors.to_decimal_processor_factory(
                    decimal.Decimal,
                    self.scale
                    if self.scale is not None
                    else self._default_decimal_return_scale,
                )
        else:
            if dialect.supports_native_decimal:
                # DBAPI returns Decimal but floats were requested
                return processors.to_float
            else:
                return None

    @util.memoized_property
    def _expression_adaptations(self):
        return {
            operators.mul: {
                Interval: Interval,
                Numeric: self.__class__,
                Integer: self.__class__,
            },
            operators.div: {Numeric: self.__class__, Integer: self.__class__},
            operators.truediv: {
                Numeric: self.__class__,
                Integer: self.__class__,
            },
            operators.add: {Numeric: self.__class__, Integer: self.__class__},
            operators.sub: {Numeric: self.__class__, Integer: self.__class__},
        }

699 

700 

class Float(Numeric):

    """Type representing floating point types, such as ``FLOAT`` or ``REAL``.

    Python ``float`` objects are returned by default; set the
    :paramref:`.Float.asdecimal` flag to True to coerce result values to
    ``decimal.Decimal`` objects instead.

    .. note::

       :class:`.Float` is intended for database columns explicitly known
       to be floating point (e.g. ``FLOAT``, ``REAL``, others) rather
       than exact-decimal (e.g. ``DECIMAL``, ``NUMERIC``, others).  If
       the column on the server is in fact a Numeric type such as
       ``DECIMAL`` or ``NUMERIC``, use :class:`.Numeric` or a subclass,
       otherwise numeric coercion between ``float``/``Decimal`` may or
       may not function as expected.

    """

    __visit_name__ = "float"

    # floats carry no fixed scale; _effective_decimal_return_scale then
    # falls back to decimal_return_scale or the class default
    scale = None

    def __init__(
        self, precision=None, asdecimal=False, decimal_return_scale=None
    ):
        r"""
        Construct a Float.

        :param precision: the numeric precision for use in DDL ``CREATE
          TABLE``.

        :param asdecimal: the same flag as that of :class:`.Numeric`, but
          defaults to ``False``.  Note that setting this flag to ``True``
          results in floating point conversion.

        :param decimal_return_scale: Default scale to use when converting
          from floats to Python decimals.  Floating point values will
          typically be much longer due to decimal inaccuracy, and most
          floating point database types don't have a notion of "scale", so
          by default the float type looks for the first ten decimal places
          when converting.  Specifying this value will override that
          length.  Note that the MySQL float types, which do include
          "scale", will use "scale" as the default for
          decimal_return_scale, if not otherwise specified.

          .. versionadded:: 0.9.0

        """
        self.precision = precision
        self.decimal_return_scale = decimal_return_scale
        self.asdecimal = asdecimal

    def result_processor(self, dialect, coltype):
        if self.asdecimal:
            # caller asked for Decimal; convert the DBAPI's floats
            return processors.to_decimal_processor_factory(
                decimal.Decimal, self._effective_decimal_return_scale
            )
        if dialect.supports_native_decimal:
            # DBAPI returns Decimal but floats were requested
            return processors.to_float
        return None

764 

765 

class DateTime(_LookupExpressionAdapter, TypeEngine):

    """A type for ``datetime.datetime()`` objects.

    Date and time types return objects from the Python ``datetime``
    module.  Most DBAPIs have built in support for that module, with the
    noted exception of SQLite, where date and time values are stored as
    strings and converted back to datetime objects as rows are returned.

    Some backends offer additional options for the time portion of the
    datetime, such as timezone support and fractional seconds.  For
    fractional seconds, use the dialect-specific datatype, such as
    :class:`.mysql.TIME`.  For timezone support, use at least the
    :class:`_types.TIMESTAMP` datatype, if not the dialect-specific
    datatype object.

    """

    __visit_name__ = "datetime"

    def __init__(self, timezone=False):
        """Construct a new :class:`.DateTime`.

        :param timezone: boolean.  Indicates that the datetime type should
          enable timezone support, if available on the
          **base date/time-holding type only**.  It is recommended
          to make use of the :class:`_types.TIMESTAMP` datatype directly when
          using this flag, as some databases include separate generic
          date/time-holding types distinct from the timezone-capable
          TIMESTAMP datatype, such as Oracle.

        """
        self.timezone = timezone

    def get_dbapi_type(self, dbapi):
        return dbapi.DATETIME

    @property
    def python_type(self):
        return dt.datetime

    @util.memoized_property
    def _expression_adaptations(self):
        # Derived from the PostgreSQL date/time function table:
        # http://www.postgresql.org/docs/current/static/functions-datetime.html
        this_type = self.__class__
        return {
            # datetime + interval = datetime
            operators.add: {Interval: this_type},
            # datetime - interval = datetime; datetime - datetime = interval
            operators.sub: {Interval: this_type, DateTime: Interval},
        }

819 

820 

class Date(_LookupExpressionAdapter, TypeEngine):

    """A type for ``datetime.date()`` objects."""

    __visit_name__ = "date"

    def get_dbapi_type(self, dbapi):
        return dbapi.DATETIME

    @property
    def python_type(self):
        return dt.date

    @util.memoized_property
    def _expression_adaptations(self):
        # Derived from the PostgreSQL date/time function table:
        # http://www.postgresql.org/docs/current/static/functions-datetime.html
        this_type = self.__class__
        add_rules = {
            # date + integer = date
            Integer: this_type,
            # date + interval = datetime
            Interval: DateTime,
            # date + time = datetime
            Time: DateTime,
        }
        sub_rules = {
            # date - integer = date
            Integer: this_type,
            # date - date = integer
            Date: Integer,
            # date - interval = datetime
            Interval: DateTime,
            # date - datetime = interval; not in the PG docs but works
            DateTime: Interval,
        }
        return {operators.add: add_rules, operators.sub: sub_rules}

857 

858 

class Time(_LookupExpressionAdapter, TypeEngine):

    """A type for ``datetime.time()`` objects."""

    __visit_name__ = "time"

    def __init__(self, timezone=False):
        """Construct a new :class:`.Time`.

        :param timezone: boolean.  Enables timezone support on the time
          type where the backend offers it; see :class:`.DateTime` for
          discussion of the analogous flag.

        """
        self.timezone = timezone

    def get_dbapi_type(self, dbapi):
        return dbapi.DATETIME

    @property
    def python_type(self):
        return dt.time

    @util.memoized_property
    def _expression_adaptations(self):
        # Derived from the PostgreSQL date/time function table:
        # http://www.postgresql.org/docs/current/static/functions-datetime.html
        this_type = self.__class__
        return {
            # time + date = datetime; time + interval = time
            operators.add: {Date: DateTime, Interval: this_type},
            # time - time = interval; time - interval = time
            operators.sub: {Time: Interval, Interval: this_type},
        }

884 

885 

class _Binary(TypeEngine):

    """Define base behavior for binary types."""

    def __init__(self, length=None):
        # optional length for use in DDL, for binary types that accept one
        self.length = length

    def literal_processor(self, dialect):
        # Render a value as an inline SQL string literal; the bytes are
        # decoded with the dialect encoding so string quoting rules apply.
        def process(value):
            value = value.decode(dialect.encoding).replace("'", "''")
            return "'%s'" % value

        return process

    @property
    def python_type(self):
        return util.binary_type

    # Python 3 - sqlite3 doesn't need the `Binary` conversion
    # here, though pg8000 does to indicate "bytea"
    def bind_processor(self, dialect):
        if dialect.dbapi is None:
            # no DBAPI configured; nothing to wrap with
            return None

        DBAPIBinary = dialect.dbapi.Binary

        def process(value):
            if value is not None:
                return DBAPIBinary(value)
            else:
                # None passes through as SQL NULL
                return None

        return process

    # Python 3 has native bytes() type
    # both sqlite3 and pg8000 seem to return it,
    # psycopg2 as of 2.5 returns 'memoryview'
    if util.py2k:

        def result_processor(self, dialect, coltype):
            if util.jython:

                def process(value):
                    if value is not None:
                        if isinstance(value, array.array):
                            # jython DBAPIs may return array objects
                            return value.tostring()
                        return str(value)
                    else:
                        return None

            else:
                process = processors.to_str
            return process

    else:

        def result_processor(self, dialect, coltype):
            def process(value):
                if value is not None:
                    # normalize memoryview/bytearray/etc. to bytes
                    value = bytes(value)
                return value

            return process

    def coerce_compared_value(self, op, value):
        """See :meth:`.TypeEngine.coerce_compared_value` for a description."""

        if isinstance(value, util.string_types):
            # keep the binary type for string comparison values rather
            # than coercing to a string type
            return self
        else:
            return super(_Binary, self).coerce_compared_value(op, value)

    def get_dbapi_type(self, dbapi):
        return dbapi.BINARY

960 

961 

class LargeBinary(_Binary):

    """A type for large binary byte data.

    Corresponds to a large and/or unlengthed binary type on the target
    platform -- BLOB on MySQL, BYTEA on PostgreSQL -- and performs the
    DBAPI-level conversions binary data requires.

    """

    __visit_name__ = "large_binary"

    def __init__(self, length=None):
        """
        Construct a LargeBinary type.

        :param length: optional, a length for the column for use in
          DDL statements, for those binary types that accept a length,
          such as the MySQL BLOB type.

        """
        super(LargeBinary, self).__init__(length=length)

984 

985 

@util.deprecated_cls(
    "0.6",
    "The :class:`.Binary` class is deprecated and will be removed "
    "in a future release. Please use :class:`.LargeBinary`.",
)
class Binary(LargeBinary):
    """Deprecated alias of :class:`.LargeBinary`.

    Retained for backwards compatibility; emits a deprecation warning
    on construction via the :func:`.util.deprecated_cls` decorator.
    """

    # fixed typo in the user-facing deprecation message:
    # "relase" -> "release"
    def __init__(self, *arg, **kw):
        LargeBinary.__init__(self, *arg, **kw)

994 

995 

class SchemaType(SchemaEventTarget):

    """Mark a type as possibly requiring schema-level DDL for usage.

    Supports types that must be explicitly created/dropped (i.e. PG ENUM type)
    as well as types that are complimented by table or schema level
    constraints, triggers, and other rules.

    :class:`.SchemaType` classes can also be targets for the
    :meth:`.DDLEvents.before_parent_attach` and
    :meth:`.DDLEvents.after_parent_attach` events, where the events fire off
    surrounding the association of the type object with a parent
    :class:`_schema.Column`.

    .. seealso::

        :class:`.Enum`

        :class:`.Boolean`


    """

    def __init__(
        self,
        name=None,
        schema=None,
        metadata=None,
        inherit_schema=False,
        quote=None,
        _create_events=True,
    ):
        if name is not None:
            self.name = quoted_name(name, quote)
        else:
            self.name = None
        self.schema = schema
        self.metadata = metadata
        self.inherit_schema = inherit_schema
        self._create_events = _create_events

        # when associated directly with a MetaData, CREATE/DROP of this
        # type participates in metadata-level create_all() / drop_all()
        if _create_events and self.metadata:
            event.listen(
                self.metadata,
                "before_create",
                util.portable_instancemethod(self._on_metadata_create),
            )
            event.listen(
                self.metadata,
                "after_drop",
                util.portable_instancemethod(self._on_metadata_drop),
            )

    def _translate_schema(self, effective_schema, map_):
        """Translate a schema name through a schema_translate map,
        passing it through unchanged if unmapped."""
        return map_.get(effective_schema, effective_schema)

    def _set_parent(self, column):
        # defer event registration until the column is attached to a Table
        column._on_table_attach(util.portable_instancemethod(self._set_table))

    def _variant_mapping_for_set_table(self, column):
        """Return the column type's Variant mapping (dialect name ->
        type instance, plus a "_default" entry), or None if the column's
        type is not a :class:`.Variant`."""
        if isinstance(column.type, Variant):
            variant_mapping = column.type.mapping.copy()
            variant_mapping["_default"] = column.type.impl
        else:
            variant_mapping = None
        return variant_mapping

    def _set_table(self, column, table):
        """Establish DDL event listeners once the owning column is
        attached to a :class:`_schema.Table`."""
        if self.inherit_schema:
            self.schema = table.schema

        if not self._create_events:
            return

        variant_mapping = self._variant_mapping_for_set_table(column)

        event.listen(
            table,
            "before_create",
            util.portable_instancemethod(
                self._on_table_create, {"variant_mapping": variant_mapping}
            ),
        )
        event.listen(
            table,
            "after_drop",
            util.portable_instancemethod(
                self._on_table_drop, {"variant_mapping": variant_mapping}
            ),
        )
        if self.metadata is None:
            # TODO: what's the difference between self.metadata
            # and table.metadata here ?
            event.listen(
                table.metadata,
                "before_create",
                util.portable_instancemethod(
                    self._on_metadata_create,
                    {"variant_mapping": variant_mapping},
                ),
            )
            event.listen(
                table.metadata,
                "after_drop",
                util.portable_instancemethod(
                    self._on_metadata_drop,
                    {"variant_mapping": variant_mapping},
                ),
            )

    def copy(self, **kw):
        # the copy gets its own event registrations
        return self.adapt(self.__class__, _create_events=True)

    def adapt(self, impltype, **kw):
        """Produce an adapted form of this type as ``impltype``,
        carrying over name/schema/metadata state; event registration is
        off by default for the adapted copy."""
        schema = kw.pop("schema", self.schema)
        metadata = kw.pop("metadata", self.metadata)
        _create_events = kw.pop("_create_events", False)
        return impltype(
            name=self.name,
            schema=schema,
            inherit_schema=self.inherit_schema,
            metadata=metadata,
            _create_events=_create_events,
            **kw
        )

    @property
    def bind(self):
        # an associated MetaData's bind, if any
        return self.metadata and self.metadata.bind or None

    def create(self, bind=None, checkfirst=False):
        """Issue CREATE ddl for this type, if applicable."""

        if bind is None:
            bind = _bind_or_error(self)
        t = self.dialect_impl(bind.dialect)
        # only dialect-specific SchemaType impls actually emit DDL;
        # a generic impl (same class as self) is a no-op
        if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
            t.create(bind=bind, checkfirst=checkfirst)

    def drop(self, bind=None, checkfirst=False):
        """Issue DROP ddl for this type, if applicable."""

        if bind is None:
            bind = _bind_or_error(self)
        t = self.dialect_impl(bind.dialect)
        if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
            t.drop(bind=bind, checkfirst=checkfirst)

    def _on_table_create(self, target, bind, **kw):
        # delegate to the dialect-specific impl, if distinct from self
        if not self._is_impl_for_variant(bind.dialect, kw):
            return

        t = self.dialect_impl(bind.dialect)
        if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
            t._on_table_create(target, bind, **kw)

    def _on_table_drop(self, target, bind, **kw):
        if not self._is_impl_for_variant(bind.dialect, kw):
            return

        t = self.dialect_impl(bind.dialect)
        if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
            t._on_table_drop(target, bind, **kw)

    def _on_metadata_create(self, target, bind, **kw):
        if not self._is_impl_for_variant(bind.dialect, kw):
            return

        t = self.dialect_impl(bind.dialect)
        if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
            t._on_metadata_create(target, bind, **kw)

    def _on_metadata_drop(self, target, bind, **kw):
        if not self._is_impl_for_variant(bind.dialect, kw):
            return

        t = self.dialect_impl(bind.dialect)
        if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
            t._on_metadata_drop(target, bind, **kw)

    def _is_impl_for_variant(self, dialect, kw):
        """Return whether this instance should receive DDL events for
        the given dialect.

        ``kw`` may carry a "variant_mapping" established by
        :meth:`._set_table`; when present, only the variant selected
        for the dialect (or the default variant) handles the event.
        """
        variant_mapping = kw.pop("variant_mapping", None)
        if variant_mapping is None:
            return True

        if (
            dialect.name in variant_mapping
            and variant_mapping[dialect.name] is self
        ):
            return True
        elif dialect.name not in variant_mapping:
            return variant_mapping["_default"] is self
        else:
            # a variant exists for this dialect but it is not this
            # instance; previously this fell through and returned an
            # implicit None -- return False explicitly for clarity
            # (identical truthiness for all callers).
            return False

1189 

class Enum(Emulated, String, SchemaType):
    """Generic Enum Type.

    The :class:`.Enum` type provides a set of possible string values
    which the column is constrained towards.

    The :class:`.Enum` type will make use of the backend's native "ENUM"
    type if one is available; otherwise, it uses a VARCHAR datatype and
    produces a CHECK constraint.  Use of the backend-native enum type
    can be disabled using the :paramref:`.Enum.native_enum` flag, and
    the production of the CHECK constraint is configurable using the
    :paramref:`.Enum.create_constraint` flag.

    The :class:`.Enum` type also provides in-Python validation of string
    values during both read and write operations.  When reading a value
    from the database in a result set, the string value is always checked
    against the list of possible values and a ``LookupError`` is raised
    if no match is found.  When passing a value to the database as a
    plain string within a SQL statement, if the
    :paramref:`.Enum.validate_strings` parameter is
    set to True, a ``LookupError`` is raised for any string value that's
    not located in the given list of possible values; note that this
    impacts usage of LIKE expressions with enumerated values (an unusual
    use case).

    .. versionchanged:: 1.1 the :class:`.Enum` type now provides in-Python
       validation of input values as well as on data being returned by
       the database.

    The source of enumerated values may be a list of string values, or
    alternatively a PEP-435-compliant enumerated class.  For the purposes
    of the :class:`.Enum` datatype, this class need only provide a
    ``__members__`` method.

    When using an enumerated class, the enumerated objects are used
    both for input and output, rather than strings as is the case with
    a plain-string enumerated type::

        import enum
        class MyEnum(enum.Enum):
            one = 1
            two = 2
            three = 3

        t = Table(
            'data', MetaData(),
            Column('value', Enum(MyEnum))
        )

        connection.execute(t.insert(), {"value": MyEnum.two})
        assert connection.scalar(t.select()) is MyEnum.two

    Above, the string names of each element, e.g. "one", "two", "three",
    are persisted to the database; the values of the Python Enum, here
    indicated as integers, are **not** used; the value of each enum can
    therefore be any kind of Python object whether or not it is persistable.

    In order to persist the values and not the names, the
    :paramref:`.Enum.values_callable` parameter may be used.  The value of
    this parameter is a user-supplied callable, which is intended to be used
    with a PEP-435-compliant enumerated class and returns a list of string
    values to be persisted.  For a simple enumeration that uses string values,
    a callable such as ``lambda x: [e.value for e in x]`` is sufficient.

    .. versionadded:: 1.1 - support for PEP-435-style enumerated
       classes.


    .. seealso::

        :class:`_postgresql.ENUM` - PostgreSQL-specific type,
        which has additional functionality.

        :class:`.mysql.ENUM` - MySQL-specific type

    """

    __visit_name__ = "enum"

    @util.deprecated_params(
        convert_unicode=(
            "1.3",
            "The :paramref:`.Enum.convert_unicode` parameter is deprecated "
            "and will be removed in a future release.  All modern DBAPIs "
            "now support Python Unicode directly and this parameter is "
            "unnecessary.",
        )
    )
    def __init__(self, *enums, **kw):
        r"""Construct an enum.

        Keyword arguments which don't apply to a specific backend are ignored
        by that backend.

        :param \*enums: either exactly one PEP-435 compliant enumerated type
           or one or more string or unicode enumeration labels. If unicode
           labels are present, the `convert_unicode` flag is auto-enabled.

           .. versionadded:: 1.1 a PEP-435 style enumerated class may be
              passed.

        :param convert_unicode: Enable unicode-aware bind parameter and
           result-set processing for this Enum's data. This is set
           automatically based on the presence of unicode label strings.

        :param create_constraint: defaults to True.  When creating a non-native
           enumerated type, also build a CHECK constraint on the database
           against the valid values.

           .. versionadded:: 1.1 - added :paramref:`.Enum.create_constraint`
              which provides the option to disable the production of the
              CHECK constraint for a non-native enumerated type.

        :param metadata: Associate this type directly with a ``MetaData``
           object. For types that exist on the target database as an
           independent schema construct (PostgreSQL), this type will be
           created and dropped within ``create_all()`` and ``drop_all()``
           operations. If the type is not associated with any ``MetaData``
           object, it will associate itself with each ``Table`` in which it is
           used, and will be created when any of those individual tables are
           created, after a check is performed for its existence. The type is
           only dropped when ``drop_all()`` is called for that ``Table``
           object's metadata, however.

        :param name: The name of this type. This is required for PostgreSQL
           and any future supported database which requires an explicitly
           named type, or an explicitly named constraint in order to generate
           the type and/or a table that uses it. If a PEP-435 enumerated
           class was used, its name (converted to lower case) is used by
           default.

        :param native_enum: Use the database's native ENUM type when
           available. Defaults to True. When False, uses VARCHAR + check
           constraint for all backends. The VARCHAR length can be controlled
           with :paramref:`.Enum.length`

        :param length: Allows specifying a custom length for the VARCHAR
           when :paramref:`.Enum.native_enum` is False. By default it uses the
           length of the longest value.

           .. versionadded:: 1.3.16

        :param schema: Schema name of this type. For types that exist on the
           target database as an independent schema construct (PostgreSQL),
           this parameter specifies the named schema in which the type is
           present.

           .. note::

                The ``schema`` of the :class:`.Enum` type does not
                by default make use of the ``schema`` established on the
                owning :class:`_schema.Column`.  If this behavior is desired,
                set the ``inherit_schema`` flag to ``True``.

        :param quote: Set explicit quoting preferences for the type's name.

        :param inherit_schema: When ``True``, the "schema" from the owning
           :class:`_schema.Table`
           will be copied to the "schema" attribute of this
           :class:`.Enum`, replacing whatever value was passed for the
           ``schema`` attribute.   This also takes effect when using the
           :meth:`_schema.Table.tometadata` operation.

        :param validate_strings: when True, string values that are being
           passed to the database in a SQL statement will be checked
           for validity against the list of enumerated values.  Unrecognized
           values will result in a ``LookupError`` being raised.

           .. versionadded:: 1.1.0b2

        :param values_callable: A callable which will be passed the PEP-435
           compliant enumerated type, which should then return a list of string
           values to be persisted.   This allows for alternate usages such as
           using the string value of an enum to be persisted to the database
           instead of its name.

           .. versionadded:: 1.2.3

        :param sort_key_function: a Python callable which may be used as the
           "key" argument in the Python ``sorted()`` built-in.   The SQLAlchemy
           ORM requires that primary key columns which are mapped must
           be sortable in some way.  When using an unsortable enumeration
           object such as a Python 3 ``Enum`` object, this parameter may be
           used to set a default sort key function for the objects.  By
           default, the database value of the enumeration is used as the
           sorting function.

           .. versionadded:: 1.3.8


        """
        self._enum_init(enums, kw)

    @property
    def _enums_argument(self):
        # reconstruct the positional "enums" argument as originally
        # passed: either the single PEP-435 class, or the string labels
        if self.enum_class is not None:
            return [self.enum_class]
        else:
            return self.enums

    def _enum_init(self, enums, kw):
        """internal init for :class:`.Enum` and subclasses.

        friendly init helper used by subclasses to remove
        all the Enum-specific keyword arguments from kw.  Allows all
        other arguments in kw to pass through.

        """
        self.native_enum = kw.pop("native_enum", True)
        self.create_constraint = kw.pop("create_constraint", True)
        self.values_callable = kw.pop("values_callable", None)
        self._sort_key_function = kw.pop("sort_key_function", NO_ARG)
        length_arg = kw.pop("length", NO_ARG)

        values, objects = self._parse_into_values(enums, kw)
        self._setup_for_values(values, objects, kw)

        convert_unicode = kw.pop("convert_unicode", None)
        self.validate_strings = kw.pop("validate_strings", False)

        if convert_unicode is None:
            for e in self.enums:
                # this is all py2k logic that can go away for py3k only,
                # "expect unicode" will always be implicitly true
                if isinstance(e, util.text_type):
                    _expect_unicode = True
                    break
            else:
                _expect_unicode = False
        else:
            _expect_unicode = convert_unicode

        # VARCHAR length defaults to the longest label; an explicit
        # "length" may only widen it, and only for non-native enums
        if self.enums:
            length = max(len(x) for x in self.enums)
        else:
            length = 0
        if not self.native_enum and length_arg is not NO_ARG:
            if length_arg < length:
                raise ValueError(
                    "When provided, length must be larger or equal"
                    " than the length of the longest enum value. %s < %s"
                    % (length_arg, length)
                )
            length = length_arg

        # None always maps to None in both directions
        self._valid_lookup[None] = self._object_lookup[None] = None

        super(Enum, self).__init__(
            length=length, _expect_unicode=_expect_unicode
        )

        if self.enum_class:
            kw.setdefault("name", self.enum_class.__name__.lower())
        SchemaType.__init__(
            self,
            name=kw.pop("name", None),
            schema=kw.pop("schema", None),
            metadata=kw.pop("metadata", None),
            inherit_schema=kw.pop("inherit_schema", False),
            quote=kw.pop("quote", None),
            _create_events=kw.pop("_create_events", True),
        )

    def _parse_into_values(self, enums, kw):
        """Resolve the positional arguments into (values, objects):
        string labels to persist, and the Python objects they map to
        (the same strings, for a plain-string enum)."""
        if not enums and "_enums" in kw:
            enums = kw.pop("_enums")

        if len(enums) == 1 and hasattr(enums[0], "__members__"):
            # a single PEP-435-compliant enumerated class
            self.enum_class = enums[0]
            members = self.enum_class.__members__
            if self.values_callable:
                values = self.values_callable(self.enum_class)
            else:
                values = list(members)
            objects = [members[k] for k in members]
            return values, objects
        else:
            self.enum_class = None
            return enums, enums

    def _setup_for_values(self, values, objects, kw):
        """Build the bidirectional object<->database-value lookups."""
        self.enums = list(values)

        # reversed() so that on duplicate objects/values, the
        # first-listed entry wins in the resulting dict
        self._valid_lookup = dict(zip(reversed(objects), reversed(values)))

        self._object_lookup = dict(zip(values, objects))

        # also allow the database values themselves as lookup keys
        self._valid_lookup.update(
            [
                (value, self._valid_lookup[self._object_lookup[value]])
                for value in values
            ]
        )

    @property
    def sort_key_function(self):
        if self._sort_key_function is NO_ARG:
            # default: sort by database value
            return self._db_value_for_elem
        else:
            return self._sort_key_function

    @property
    def native(self):
        return self.native_enum

    def _db_value_for_elem(self, elem):
        """Translate a Python element to its database string value,
        raising ``LookupError`` for unknown non-string elements."""
        try:
            return self._valid_lookup[elem]
        except KeyError as err:
            # for unknown string values, we return as is.  While we can
            # validate these if we wanted, that does not allow for lesser-used
            # end-user use cases, such as using a LIKE comparison with an enum,
            # or for an application that wishes to apply string tests to an
            # ENUM (see [ticket:3725]).  While we can decide to differentiate
            # here between an INSERT statement and a criteria used in a SELECT,
            # for now we're staying conservative w/ behavioral changes (perhaps
            # someone has a trigger that handles strings on INSERT)
            if not self.validate_strings and isinstance(
                elem, compat.string_types
            ):
                return elem
            else:
                util.raise_(
                    LookupError(
                        '"%s" is not among the defined enum values' % elem
                    ),
                    replace_context=err,
                )

    class Comparator(String.Comparator):
        def _adapt_expression(self, op, other_comparator):
            op, typ = super(Enum.Comparator, self)._adapt_expression(
                op, other_comparator
            )
            if op is operators.concat_op:
                # concatenation of enums yields a plain string, not an enum
                typ = String(
                    self.type.length, _expect_unicode=self.type._expect_unicode
                )
            return op, typ

    comparator_factory = Comparator

    def _object_value_for_elem(self, elem):
        """Translate a database string value back to its Python object,
        raising ``LookupError`` for unknown values."""
        try:
            return self._object_lookup[elem]
        except KeyError as err:
            util.raise_(
                LookupError(
                    '"%s" is not among the defined enum values' % elem
                ),
                replace_context=err,
            )

    def __repr__(self):
        return util.generic_repr(
            self,
            additional_kw=[("native_enum", True)],
            to_inspect=[Enum, SchemaType],
        )

    def adapt_to_emulated(self, impltype, **kw):
        kw.setdefault("_expect_unicode", self._expect_unicode)
        kw.setdefault("validate_strings", self.validate_strings)
        kw.setdefault("name", self.name)
        kw.setdefault("schema", self.schema)
        kw.setdefault("inherit_schema", self.inherit_schema)
        kw.setdefault("metadata", self.metadata)
        kw.setdefault("_create_events", False)
        kw.setdefault("native_enum", self.native_enum)
        kw.setdefault("values_callable", self.values_callable)
        kw.setdefault("create_constraint", self.create_constraint)
        kw.setdefault("length", self.length)
        assert "_enums" in kw
        return impltype(**kw)

    def adapt(self, impltype, **kw):
        kw["_enums"] = self._enums_argument
        return super(Enum, self).adapt(impltype, **kw)

    def _should_create_constraint(self, compiler, **kw):
        if not self._is_impl_for_variant(compiler.dialect, kw):
            return False
        # a CHECK constraint is emitted only when the native ENUM
        # type is unavailable or disabled
        return (
            not self.native_enum or not compiler.dialect.supports_native_enum
        )

    @util.dependencies("sqlalchemy.sql.schema")
    def _set_table(self, schema, column, table):
        SchemaType._set_table(self, column, table)

        if not self.create_constraint:
            return

        variant_mapping = self._variant_mapping_for_set_table(column)

        e = schema.CheckConstraint(
            type_coerce(column, self).in_(self.enums),
            name=_defer_name(self.name),
            _create_rule=util.portable_instancemethod(
                self._should_create_constraint,
                {"variant_mapping": variant_mapping},
            ),
            _type_bound=True,
        )
        assert e.table is table

    def literal_processor(self, dialect):
        parent_processor = super(Enum, self).literal_processor(dialect)

        def process(value):
            value = self._db_value_for_elem(value)
            if parent_processor:
                value = parent_processor(value)
            return value

        return process

    def bind_processor(self, dialect):
        # resolve the parent processor before defining the closure, for
        # consistency with literal_processor / result_processor (the
        # previous ordering relied on late binding and read confusingly)
        parent_processor = super(Enum, self).bind_processor(dialect)

        def process(value):
            value = self._db_value_for_elem(value)
            if parent_processor:
                value = parent_processor(value)
            return value

        return process

    def result_processor(self, dialect, coltype):
        parent_processor = super(Enum, self).result_processor(dialect, coltype)

        def process(value):
            if parent_processor:
                value = parent_processor(value)

            value = self._object_value_for_elem(value)
            return value

        return process

    def copy(self, **kw):
        return SchemaType.copy(self, **kw)

    @property
    def python_type(self):
        if self.enum_class:
            return self.enum_class
        else:
            return super(Enum, self).python_type

1640 

class PickleType(TypeDecorator):
    """Holds Python objects, which are serialized using pickle.

    PickleType builds upon the Binary type to apply Python's
    ``pickle.dumps()`` to incoming objects, and ``pickle.loads()`` on
    the way out, allowing any pickleable Python object to be stored as
    a serialized binary field.

    To allow ORM change events to propagate for elements associated
    with :class:`.PickleType`, see :ref:`mutable_toplevel`.

    """

    impl = LargeBinary

    def __init__(
        self, protocol=pickle.HIGHEST_PROTOCOL, pickler=None, comparator=None
    ):
        """
        Construct a PickleType.

        :param protocol: defaults to ``pickle.HIGHEST_PROTOCOL``.

        :param pickler: defaults to cPickle.pickle or pickle.pickle if
          cPickle is not available.  May be any object with
          pickle-compatible ``dumps`` and ``loads`` methods.

        :param comparator: a 2-arg callable predicate used
          to compare values of this type.  If left as ``None``,
          the Python "equals" operator is used to compare values.

        """
        self.protocol = protocol
        self.pickler = pickler or pickle
        self.comparator = comparator
        super(PickleType, self).__init__()

    def __reduce__(self):
        # the custom pickler, if any, is intentionally not carried
        # through pickling of the type itself
        return PickleType, (self.protocol, None, self.comparator)

    def bind_processor(self, dialect):
        impl_processor = self.impl.bind_processor(dialect)
        pickle_dumps = self.pickler.dumps
        protocol = self.protocol

        if impl_processor:

            def process(value):
                serialized = (
                    pickle_dumps(value, protocol)
                    if value is not None
                    else None
                )
                # the underlying binary processor also sees None values
                return impl_processor(serialized)

        else:

            def process(value):
                return (
                    pickle_dumps(value, protocol)
                    if value is not None
                    else None
                )

        return process

    def result_processor(self, dialect, coltype):
        impl_processor = self.impl.result_processor(dialect, coltype)
        pickle_loads = self.pickler.loads

        if impl_processor:

            def process(value):
                raw = impl_processor(value)
                return pickle_loads(raw) if raw is not None else None

        else:

            def process(value):
                return pickle_loads(value) if value is not None else None

        return process

    def compare_values(self, x, y):
        # use the user-supplied predicate when present, else plain equality
        return self.comparator(x, y) if self.comparator else x == y

1726 

1727 

class Boolean(Emulated, TypeEngine, SchemaType):
    """A bool datatype.

    :class:`.Boolean` typically uses BOOLEAN or SMALLINT on the DDL side,
    and on the Python side deals in ``True`` or ``False``.

    Two levels of assertion ensure the persisted values are simple
    true/false values.  For all backends, only the Python values ``None``,
    ``True``, ``False``, ``1`` or ``0`` are accepted as parameter values.
    For backends lacking a "native boolean" datatype, a CHECK constraint
    is additionally created on the target column; its production can be
    disabled via the :paramref:`.Boolean.create_constraint` flag set to
    ``False``.

    .. versionchanged:: 1.2 the :class:`.Boolean` datatype now asserts that
       incoming Python values are already in pure boolean form.


    """

    __visit_name__ = "boolean"
    native = True

    def __init__(self, create_constraint=True, name=None, _create_events=True):
        """Construct a Boolean.

        :param create_constraint: defaults to True.  If the boolean
          is generated as an int/smallint, also create a CHECK constraint
          on the table that ensures 1 or 0 as a value.

        :param name: if a CHECK constraint is generated, specify
          the name of the constraint.

        """
        self.create_constraint = create_constraint
        self.name = name
        self._create_events = _create_events

    def _should_create_constraint(self, compiler, **kw):
        if not self._is_impl_for_variant(compiler.dialect, kw):
            return False
        dialect = compiler.dialect
        # only emit the CHECK when the backend lacks a native boolean
        # and opts into the non-native check constraint
        return (
            not dialect.supports_native_boolean
            and dialect.non_native_boolean_check_constraint
        )

    @util.dependencies("sqlalchemy.sql.schema")
    def _set_table(self, schema, column, table):
        if not self.create_constraint:
            return

        variant_mapping = self._variant_mapping_for_set_table(column)

        constraint = schema.CheckConstraint(
            type_coerce(column, self).in_([0, 1]),
            name=_defer_name(self.name),
            _create_rule=util.portable_instancemethod(
                self._should_create_constraint,
                {"variant_mapping": variant_mapping},
            ),
            _type_bound=True,
        )
        assert constraint.table is table

    @property
    def python_type(self):
        return bool

    _strict_bools = frozenset([None, True, False])

    def _strict_as_bool(self, value):
        """Pass through None/True/False (and 1/0, which compare equal);
        reject anything else with TypeError or ValueError."""
        if value in self._strict_bools:
            return value
        if not isinstance(value, int):
            raise TypeError("Not a boolean value: %r" % value)
        raise ValueError(
            "Value %r is not None, True, or False" % value
        )

    def literal_processor(self, dialect):
        compiler = dialect.statement_compiler(dialect, None)
        true_literal = compiler.visit_true(None)
        false_literal = compiler.visit_false(None)

        def process(value):
            return true_literal if self._strict_as_bool(value) else false_literal

        return process

    def bind_processor(self, dialect):
        _strict_as_bool = self._strict_as_bool
        # native-boolean backends want bool; the rest want 1/0 ints
        _coerce = bool if dialect.supports_native_boolean else int

        def process(value):
            checked = _strict_as_bool(value)
            return _coerce(checked) if checked is not None else None

        return process

    def result_processor(self, dialect, coltype):
        if dialect.supports_native_boolean:
            return None
        return processors.int_to_boolean

1840 

1841 

class _AbstractInterval(_LookupExpressionAdapter, TypeEngine):
    @util.memoized_property
    def _expression_adaptations(self):
        # Derived from the PostgreSQL date/time operator tables:
        # http://www.postgresql.org/docs/current/static/functions-datetime.html
        interval_cls = self.__class__
        return {
            operators.add: {
                Date: DateTime,
                Interval: interval_cls,
                DateTime: DateTime,
                Time: Time,
            },
            operators.sub: {Interval: interval_cls},
            operators.mul: {Numeric: interval_cls},
            operators.truediv: {Numeric: interval_cls},
            operators.div: {Numeric: interval_cls},
        }

    @property
    def _type_affinity(self):
        # all interval flavors share the Interval affinity
        return Interval

    def coerce_compared_value(self, op, value):
        """See :meth:`.TypeEngine.coerce_compared_value` for a description."""
        return self.impl.coerce_compared_value(op, value)

1868 

1869 

class Interval(Emulated, _AbstractInterval, TypeDecorator):
    """A type for ``datetime.timedelta()`` objects.

    The Interval type deals with ``datetime.timedelta`` objects.  In
    PostgreSQL, the native ``INTERVAL`` type is used; for others, the
    value is stored as a date which is relative to the "epoch"
    (Jan. 1, 1970).

    Note that the ``Interval`` type does not currently provide date arithmetic
    operations on platforms which do not support interval types natively. Such
    operations usually require transformation of both sides of the expression
    (such as, conversion of both sides into integer epoch values first) which
    currently is a manual procedure (such as via
    :attr:`~sqlalchemy.sql.expression.func`).

    """

    impl = DateTime
    # reference point for the non-native representation
    epoch = dt.datetime.utcfromtimestamp(0)

    def __init__(self, native=True, second_precision=None, day_precision=None):
        """Construct an Interval object.

        :param native: when True, use the actual
          INTERVAL type provided by the database, if
          supported (currently PostgreSQL, Oracle).
          Otherwise, represent the interval data as
          an epoch value regardless.

        :param second_precision: For native interval types
          which support a "fractional seconds precision" parameter,
          i.e. Oracle and PostgreSQL

        :param day_precision: for native interval types which
          support a "day precision" parameter, i.e. Oracle.

        """
        super(Interval, self).__init__()
        self.native = native
        self.second_precision = second_precision
        self.day_precision = day_precision

    @property
    def python_type(self):
        return dt.timedelta

    def adapt_to_emulated(self, impltype, **kw):
        return _AbstractInterval.adapt(self, impltype, **kw)

    def bind_processor(self, dialect):
        impl_processor = self.impl.bind_processor(dialect)
        epoch = self.epoch

        if impl_processor:

            def process(value):
                # timedelta -> absolute datetime relative to the epoch,
                # then through the DateTime impl's own processing
                as_datetime = epoch + value if value is not None else None
                return impl_processor(as_datetime)

        else:

            def process(value):
                return epoch + value if value is not None else None

        return process

    def result_processor(self, dialect, coltype):
        impl_processor = self.impl.result_processor(dialect, coltype)
        epoch = self.epoch

        if impl_processor:

            def process(value):
                as_datetime = impl_processor(value)
                # absolute datetime back to a timedelta from the epoch
                return (
                    as_datetime - epoch if as_datetime is not None else None
                )

        else:

            def process(value):
                return value - epoch if value is not None else None

        return process

1958 

1959 

class JSON(Indexable, TypeEngine):
    """Represent a SQL JSON type.

    .. note:: :class:`_types.JSON`
       is provided as a facade for vendor-specific
       JSON types.  Since it supports JSON SQL operations, it only
       works on backends that have an actual JSON type, currently:

       * PostgreSQL

       * MySQL as of version 5.7 (MariaDB as of the 10.2 series does not)

       * SQLite as of version 3.9

    :class:`_types.JSON` is part of the Core in support of the growing
    popularity of native JSON datatypes.

    The :class:`_types.JSON` type stores arbitrary JSON format data, e.g.::

        data_table = Table('data_table', metadata,
            Column('id', Integer, primary_key=True),
            Column('data', JSON)
        )

        with engine.connect() as conn:
            conn.execute(
                data_table.insert(),
                data = {"key1": "value1", "key2": "value2"}
            )

    **JSON-Specific Expression Operators**

    The :class:`_types.JSON`
    datatype provides these additional SQL operations:

    * Keyed index operations::

        data_table.c.data['some key']

    * Integer index operations::

        data_table.c.data[3]

    * Path index operations::

        data_table.c.data[('key_1', 'key_2', 5, ..., 'key_n')]

    * Data casters for specific JSON element types, subsequent to an index
      or path operation being invoked::

        data_table.c.data["some key"].as_integer()

      .. versionadded:: 1.3.11

    Additional operations may be available from the dialect-specific versions
    of :class:`_types.JSON`, such as :class:`_postgresql.JSON` and
    :class:`_postgresql.JSONB` which both offer additional PostgreSQL-specific
    operations.

    **Casting JSON Elements to Other Types**

    Index operations, i.e. those invoked by calling upon the expression using
    the Python bracket operator as in ``some_column['some key']``, return an
    expression object whose type defaults to :class:`_types.JSON` by default,
    so that
    further JSON-oriented instructions may be called upon the result type.
    However, it is likely more common that an index operation is expected
    to return a specific scalar element, such as a string or integer.  In
    order to provide access to these elements in a backend-agnostic way,
    a series of data casters are provided:

    * :meth:`.JSON.Comparator.as_string` - return the element as a string

    * :meth:`.JSON.Comparator.as_boolean` - return the element as a boolean

    * :meth:`.JSON.Comparator.as_float` - return the element as a float

    * :meth:`.JSON.Comparator.as_integer` - return the element as an integer

    These data casters are implemented by supporting dialects in order to
    assure that comparisons to the above types will work as expected, such as::

        # integer comparison
        data_table.c.data["some_integer_key"].as_integer() == 5

        # boolean comparison
        data_table.c.data["some_boolean"].as_boolean() == True

    .. versionadded:: 1.3.11 Added type-specific casters for the basic JSON
       data element types.

    .. note::

        The data caster functions are new in version 1.3.11, and supersede
        the previous documented approaches of using CAST; for reference,
        this looked like::

            from sqlalchemy import cast, type_coerce
            from sqlalchemy import String, JSON
            cast(
                data_table.c.data['some_key'], String
            ) == type_coerce(55, JSON)

        The above case now works directly as::

            data_table.c.data['some_key'].as_integer() == 5

        For details on the previous comparison approach within the 1.3.x
        series, see the documentation for SQLAlchemy 1.2 or the included HTML
        files in the doc/ directory of the version's distribution.

    **Detecting Changes in JSON columns when using the ORM**

    The :class:`_types.JSON` type, when used with the SQLAlchemy ORM, does not
    detect in-place mutations to the structure.  In order to detect these, the
    :mod:`sqlalchemy.ext.mutable` extension must be used.  This extension will
    allow "in-place" changes to the datastructure to produce events which
    will be detected by the unit of work.  See the example at :class:`.HSTORE`
    for a simple example involving a dictionary.

    **Support for JSON null vs. SQL NULL**

    When working with NULL values, the :class:`_types.JSON`
    type recommends the
    use of two specific constants in order to differentiate between a column
    that evaluates to SQL NULL, e.g. no value, vs. the JSON-encoded string
    of ``"null"``.   To insert or select against a value that is SQL NULL,
    use the constant :func:`.null`::

        from sqlalchemy import null
        conn.execute(table.insert(), json_value=null())

    To insert or select against a value that is JSON ``"null"``, use the
    constant :attr:`_types.JSON.NULL`::

        conn.execute(table.insert(), json_value=JSON.NULL)

    The :class:`_types.JSON` type supports a flag
    :paramref:`_types.JSON.none_as_null` which when set to True will result
    in the Python constant ``None`` evaluating to the value of SQL
    NULL, and when set to False results in the Python constant
    ``None`` evaluating to the value of JSON ``"null"``.    The Python
    value ``None`` may be used in conjunction with either
    :attr:`_types.JSON.NULL` and :func:`.null` in order to indicate NULL
    values, but care must be taken as to the value of the
    :paramref:`_types.JSON.none_as_null` in these cases.

    **Customizing the JSON Serializer**

    The JSON serializer and deserializer used by :class:`_types.JSON`
    defaults to
    Python's ``json.dumps`` and ``json.loads`` functions; in the case of the
    psycopg2 dialect, psycopg2 may be using its own custom loader function.

    In order to affect the serializer / deserializer, they are currently
    configurable at the :func:`_sa.create_engine` level via the
    :paramref:`_sa.create_engine.json_serializer` and
    :paramref:`_sa.create_engine.json_deserializer` parameters.  For example,
    to turn off ``ensure_ascii``::

        engine = create_engine(
            "sqlite://",
            json_serializer=lambda obj: json.dumps(obj, ensure_ascii=False))

    .. versionchanged:: 1.3.7

        SQLite dialect's ``json_serializer`` and ``json_deserializer``
        parameters renamed from ``_json_serializer`` and
        ``_json_deserializer``.

    .. seealso::

        :class:`_postgresql.JSON`

        :class:`_postgresql.JSONB`

        :class:`.mysql.JSON`

        :class:`_sqlite.JSON`

    .. versionadded:: 1.1


    """

    __visit_name__ = "JSON"

    # full JSON documents are mutable containers and may not be hashed
    hashable = False
    NULL = util.symbol("JSON_NULL")
    """Describe the json value of NULL.

    This value is used to force the JSON value of ``"null"`` to be
    used as the value.   A value of Python ``None`` will be recognized
    either as SQL NULL or JSON ``"null"``, based on the setting
    of the :paramref:`_types.JSON.none_as_null` flag; the
    :attr:`_types.JSON.NULL`
    constant can be used to always resolve to JSON ``"null"`` regardless
    of this setting.  This is in contrast to the :func:`_expression.null`
    construct,
    which always resolves to SQL NULL.  E.g.::

        from sqlalchemy import null
        from sqlalchemy.dialects.postgresql import JSON

        # will *always* insert SQL NULL
        obj1 = MyObject(json_value=null())

        # will *always* insert JSON string "null"
        obj2 = MyObject(json_value=JSON.NULL)

        session.add_all([obj1, obj2])
        session.commit()

    In order to set JSON NULL as a default value for a column, the most
    transparent method is to use :func:`_expression.text`::

        Table(
            'my_table', metadata,
            Column('json_data', JSON, default=text("'null'"))
        )

    While it is possible to use :attr:`_types.JSON.NULL` in this context, the
    :attr:`_types.JSON.NULL` value will be returned as the value of the column,
    which in the context of the ORM or other repurposing of the default
    value, may not be desirable.  Using a SQL expression means the value
    will be re-fetched from the database within the context of retrieving
    generated defaults.


    """

    def __init__(self, none_as_null=False):
        """Construct a :class:`_types.JSON` type.

        :param none_as_null=False: if True, persist the value ``None`` as a
         SQL NULL value, not the JSON encoding of ``null``.   Note that
         when this flag is False, the :func:`.null` construct can still
         be used to persist a NULL value::

             from sqlalchemy import null
             conn.execute(table.insert(), data=null())

        .. note::

              :paramref:`_types.JSON.none_as_null` does **not** apply to the
              values passed to :paramref:`_schema.Column.default` and
              :paramref:`_schema.Column.server_default`; a value of ``None``
              passed for these parameters means "no default present".

        .. seealso::

             :attr:`.types.JSON.NULL`

        """
        self.none_as_null = none_as_null

    class JSONElementType(TypeEngine):
        """common function for index / path elements in a JSON expression."""

        # index elements may be integers or strings; each kind is run
        # through the corresponding scalar type's processor
        _integer = Integer()
        _string = String()

        def string_bind_processor(self, dialect):
            return self._string._cached_bind_processor(dialect)

        def string_literal_processor(self, dialect):
            return self._string._cached_literal_processor(dialect)

        def bind_processor(self, dialect):
            int_processor = self._integer._cached_bind_processor(dialect)
            string_processor = self.string_bind_processor(dialect)

            def process(value):
                # dispatch on the Python type of the index element;
                # values of other types pass through unchanged
                if int_processor and isinstance(value, int):
                    value = int_processor(value)
                elif string_processor and isinstance(value, util.string_types):
                    value = string_processor(value)
                return value

            return process

        def literal_processor(self, dialect):
            int_processor = self._integer._cached_literal_processor(dialect)
            string_processor = self.string_literal_processor(dialect)

            def process(value):
                # same dispatch as bind_processor, for inline-literal
                # rendering
                if int_processor and isinstance(value, int):
                    value = int_processor(value)
                elif string_processor and isinstance(value, util.string_types):
                    value = string_processor(value)
                return value

            return process

    class JSONIndexType(JSONElementType):
        """Placeholder for the datatype of a JSON index value.

        This allows execution-time processing of JSON index values
        for special syntaxes.

        """

    class JSONPathType(JSONElementType):
        """Placeholder type for JSON path operations.

        This allows execution-time processing of a path-based
        index value into a specific SQL syntax.

        """

    class Comparator(Indexable.Comparator, Concatenable.Comparator):
        """Define comparison operations for :class:`_types.JSON`."""

        @util.dependencies("sqlalchemy.sql.default_comparator")
        def _setup_getitem(self, default_comparator, index):
            # a non-string sequence (e.g. tuple/list) selects the JSON
            # path operator; any scalar index uses plain getitem
            if not isinstance(index, util.string_types) and isinstance(
                index, compat.collections_abc.Sequence
            ):
                index = default_comparator._check_literal(
                    self.expr,
                    operators.json_path_getitem_op,
                    index,
                    bindparam_type=JSON.JSONPathType,
                )

                operator = operators.json_path_getitem_op
            else:
                index = default_comparator._check_literal(
                    self.expr,
                    operators.json_getitem_op,
                    index,
                    bindparam_type=JSON.JSONIndexType,
                )
                operator = operators.json_getitem_op

            return operator, index, self.type

        def as_boolean(self):
            """Cast an indexed value as boolean.

            e.g.::

                stmt = select([
                    mytable.c.json_column['some_data'].as_boolean()
                ]).where(
                    mytable.c.json_column['some_data'].as_boolean() == True
                )

            .. versionadded:: 1.3.11

            """
            return self._binary_w_type(Boolean(), "as_boolean")

        def as_string(self):
            """Cast an indexed value as string.

            e.g.::

                stmt = select([
                    mytable.c.json_column['some_data'].as_string()
                ]).where(
                    mytable.c.json_column['some_data'].as_string() ==
                    'some string'
                )

            .. versionadded:: 1.3.11

            """
            return self._binary_w_type(String(), "as_string")

        def as_integer(self):
            """Cast an indexed value as integer.

            e.g.::

                stmt = select([
                    mytable.c.json_column['some_data'].as_integer()
                ]).where(
                    mytable.c.json_column['some_data'].as_integer() == 5
                )

            .. versionadded:: 1.3.11

            """
            return self._binary_w_type(Integer(), "as_integer")

        def as_float(self):
            """Cast an indexed value as float.

            e.g.::

                stmt = select([
                    mytable.c.json_column['some_data'].as_float()
                ]).where(
                    mytable.c.json_column['some_data'].as_float() == 29.75
                )

            .. versionadded:: 1.3.11

            """
            # note there's no Numeric or Decimal support here yet
            return self._binary_w_type(Float(), "as_float")

        def as_json(self):
            """Cast an indexed value as JSON.

            This is the default behavior of indexed elements in any case.

            Note that comparison of full JSON structures may not be
            supported by all backends.

            .. versionadded:: 1.3.11

            """
            return self.expr

        def _binary_w_type(self, typ, method_name):
            # a caster is only meaningful directly on top of an index or
            # path getitem expression; clone that binary expression and
            # swap in the requested result type
            if not isinstance(
                self.expr, elements.BinaryExpression
            ) or self.expr.operator not in (
                operators.json_getitem_op,
                operators.json_path_getitem_op,
            ):
                raise exc.InvalidRequestError(
                    "The JSON cast operator JSON.%s() only works with a JSON "
                    "index expression e.g. col['q'].%s()"
                    % (method_name, method_name)
                )
            expr = self.expr._clone()
            expr.type = typ
            return expr

    comparator_factory = Comparator

    @property
    def python_type(self):
        return dict

    @property
    def should_evaluate_none(self):
        """Alias of :attr:`_types.JSON.none_as_null`"""
        # note the inversion: "evaluate None" means None is NOT SQL NULL
        return not self.none_as_null

    @should_evaluate_none.setter
    def should_evaluate_none(self, value):
        self.none_as_null = not value

    @util.memoized_property
    def _str_impl(self):
        # serialized JSON travels to/from the database as a unicode string
        return String(_expect_unicode=True)

    def bind_processor(self, dialect):
        string_process = self._str_impl.bind_processor(dialect)

        # dialect may supply its own serializer; fall back to json.dumps
        json_serializer = dialect._json_serializer or json.dumps

        def process(value):
            # JSON.NULL serializes to the JSON "null" document;
            # sql.null() or plain None (with none_as_null) become SQL NULL
            if value is self.NULL:
                value = None
            elif isinstance(value, elements.Null) or (
                value is None and self.none_as_null
            ):
                return None

            serialized = json_serializer(value)
            if string_process:
                serialized = string_process(serialized)
            return serialized

        return process

    def result_processor(self, dialect, coltype):
        string_process = self._str_impl.result_processor(dialect, coltype)
        # dialect may supply its own deserializer; fall back to json.loads
        json_deserializer = dialect._json_deserializer or json.loads

        def process(value):
            # SQL NULL comes back as None and is never deserialized
            if value is None:
                return None
            if string_process:
                value = string_process(value)
            return json_deserializer(value)

        return process

2444 

2445 

class ARRAY(SchemaEventTarget, Indexable, Concatenable, TypeEngine):
    """Represent a SQL Array type.

    .. note::  This type serves as the basis for all ARRAY operations.
       However, currently **only the PostgreSQL backend has support
       for SQL arrays in SQLAlchemy**.  It is recommended to use the
       :class:`_postgresql.ARRAY` type directly when using ARRAY types
       with PostgreSQL, as it provides additional operators specific
       to that backend.

    :class:`_types.ARRAY` is part of the Core in support of various SQL
    standard functions such as :class:`_functions.array_agg`
    which explicitly involve
    arrays; however, with the exception of the PostgreSQL backend and possibly
    some third-party dialects, no other SQLAlchemy built-in dialect has support
    for this type.

    An :class:`_types.ARRAY` type is constructed given the "type"
    of element::

        mytable = Table("mytable", metadata,
                Column("data", ARRAY(Integer))
            )

    The above type represents an N-dimensional array,
    meaning a supporting backend such as PostgreSQL will interpret values
    with any number of dimensions automatically.   To produce an INSERT
    construct that passes in a 1-dimensional array of integers::

        connection.execute(
            mytable.insert(),
            data=[1,2,3]
        )

    The :class:`_types.ARRAY` type can be constructed given a fixed number
    of dimensions::

        mytable = Table("mytable", metadata,
                Column("data", ARRAY(Integer, dimensions=2))
            )

    Sending a number of dimensions is optional, but recommended if the
    datatype is to represent arrays of more than one dimension.  This number
    is used:

    * When emitting the type declaration itself to the database, e.g.
      ``INTEGER[][]``

    * When translating Python values to database values, and vice versa, e.g.
      an ARRAY of :class:`.Unicode` objects uses this number to efficiently
      access the string values inside of array structures without resorting
      to per-row type inspection

    * When used with the Python ``getitem`` accessor, the number of dimensions
      serves to define the kind of type that the ``[]`` operator should
      return, e.g. for an ARRAY of INTEGER with two dimensions::

          >>> expr = table.c.column[5]  # returns ARRAY(Integer, dimensions=1)
          >>> expr = expr[6]  # returns Integer

    For 1-dimensional arrays, an :class:`_types.ARRAY` instance with no
    dimension parameter will generally assume single-dimensional behaviors.

    SQL expressions of type :class:`_types.ARRAY` have support for "index" and
    "slice" behavior.  The Python ``[]`` operator works normally here, given
    integer indexes or slices.  Arrays default to 1-based indexing.
    The operator produces binary expression
    constructs which will produce the appropriate SQL, both for
    SELECT statements::

        select([mytable.c.data[5], mytable.c.data[2:7]])

    as well as UPDATE statements when the :meth:`_expression.Update.values`
    method
    is used::

        mytable.update().values({
            mytable.c.data[5]: 7,
            mytable.c.data[2:7]: [1, 2, 3]
        })

    The :class:`_types.ARRAY` type also provides for the operators
    :meth:`.types.ARRAY.Comparator.any` and
    :meth:`.types.ARRAY.Comparator.all`.  The PostgreSQL-specific version of
    :class:`_types.ARRAY` also provides additional operators.

    .. versionadded:: 1.1.0

    .. seealso::

        :class:`_postgresql.ARRAY`

    """

    __visit_name__ = "ARRAY"

    zero_indexes = False
    """if True, Python zero-based indexes should be interpreted as one-based
    on the SQL expression side."""

    class Comparator(Indexable.Comparator, Concatenable.Comparator):

        """Define comparison operations for :class:`_types.ARRAY`.

        More operators are available on the dialect-specific form
        of this type.  See :class:`.postgresql.ARRAY.Comparator`.

        """

        def _setup_getitem(self, index):
            if isinstance(index, slice):
                # slicing returns another array of the same type; shift
                # the bounds up by one first if zero_indexes is set
                return_type = self.type
                if self.type.zero_indexes:
                    index = slice(index.start + 1, index.stop + 1, index.step)
                index = Slice(
                    _literal_as_binds(
                        index.start,
                        name=self.expr.key,
                        type_=type_api.INTEGERTYPE,
                    ),
                    _literal_as_binds(
                        index.stop,
                        name=self.expr.key,
                        type_=type_api.INTEGERTYPE,
                    ),
                    _literal_as_binds(
                        index.step,
                        name=self.expr.key,
                        type_=type_api.INTEGERTYPE,
                    ),
                )
            else:
                if self.type.zero_indexes:
                    index += 1
                if self.type.dimensions is None or self.type.dimensions == 1:
                    # scalar index on a one-dimensional array yields the
                    # element type itself
                    return_type = self.type.item_type
                else:
                    # otherwise peel off exactly one dimension
                    adapt_kw = {"dimensions": self.type.dimensions - 1}
                    return_type = self.type.adapt(
                        self.type.__class__, **adapt_kw
                    )

            return operators.getitem, index, return_type

        def contains(self, *arg, **kw):
            raise NotImplementedError(
                "ARRAY.contains() not implemented for the base "
                "ARRAY type; please use the dialect-specific ARRAY type"
            )

        @util.dependencies("sqlalchemy.sql.elements")
        def any(self, elements, other, operator=None):
            """Return ``other operator ANY (array)`` clause.

            Argument places are switched, because ANY requires array
            expression to be on the right hand-side.

            E.g.::

                from sqlalchemy.sql import operators

                conn.execute(
                    select([table.c.data]).where(
                            table.c.data.any(7, operator=operators.lt)
                        )
                )

            :param other: expression to be compared
            :param operator: an operator object from the
             :mod:`sqlalchemy.sql.operators`
             package, defaults to :func:`.operators.eq`.

            .. seealso::

                :func:`_expression.any_`

                :meth:`.types.ARRAY.Comparator.all`

            """
            operator = operator if operator else operators.eq
            return operator(
                elements._literal_as_binds(other),
                elements.CollectionAggregate._create_any(self.expr),
            )

        @util.dependencies("sqlalchemy.sql.elements")
        def all(self, elements, other, operator=None):
            """Return ``other operator ALL (array)`` clause.

            Argument places are switched, because ALL requires array
            expression to be on the right hand-side.

            E.g.::

                from sqlalchemy.sql import operators

                conn.execute(
                    select([table.c.data]).where(
                            table.c.data.all(7, operator=operators.lt)
                        )
                )

            :param other: expression to be compared
            :param operator: an operator object from the
             :mod:`sqlalchemy.sql.operators`
             package, defaults to :func:`.operators.eq`.

            .. seealso::

                :func:`_expression.all_`

                :meth:`.types.ARRAY.Comparator.any`

            """
            operator = operator if operator else operators.eq
            return operator(
                elements._literal_as_binds(other),
                elements.CollectionAggregate._create_all(self.expr),
            )

    comparator_factory = Comparator

    def __init__(
        self, item_type, as_tuple=False, dimensions=None, zero_indexes=False
    ):
        """Construct an :class:`_types.ARRAY`.

        E.g.::

          Column('myarray', ARRAY(Integer))

        Arguments are:

        :param item_type: The data type of items of this array. Note that
          dimensionality is irrelevant here, so multi-dimensional arrays like
          ``INTEGER[][]``, are constructed as ``ARRAY(Integer)``, not as
          ``ARRAY(ARRAY(Integer))`` or such.

        :param as_tuple=False: Specify whether return results
          should be converted to tuples from lists. This parameter is
          not generally needed as a Python list corresponds well
          to a SQL array.

        :param dimensions: if non-None, the ARRAY will assume a fixed
         number of dimensions.  This impacts how the array is declared
         on the database, how it goes about interpreting Python and
         result values, as well as how expression behavior in conjunction
         with the "getitem" operator works.  See the description at
         :class:`_types.ARRAY` for additional detail.

        :param zero_indexes=False: when True, index values will be converted
         between Python zero-based and SQL one-based indexes, e.g.
         a value of one will be added to all index values before passing
         to the database.

        """
        if isinstance(item_type, ARRAY):
            raise ValueError(
                "Do not nest ARRAY types; ARRAY(basetype) "
                "handles multi-dimensional arrays of basetype"
            )
        if isinstance(item_type, type):
            # a type class was passed; instantiate it with defaults
            item_type = item_type()
        self.item_type = item_type
        self.as_tuple = as_tuple
        self.dimensions = dimensions
        self.zero_indexes = zero_indexes

    @property
    def hashable(self):
        # tuples are hashable, lists are not
        return self.as_tuple

    @property
    def python_type(self):
        return list

    def compare_values(self, x, y):
        return x == y

    def _set_parent(self, column):
        """Support SchemaEventTarget"""

        if isinstance(self.item_type, SchemaEventTarget):
            self.item_type._set_parent(column)

    def _set_parent_with_dispatch(self, parent):
        """Support SchemaEventTarget"""

        super(ARRAY, self)._set_parent_with_dispatch(parent)

        if isinstance(self.item_type, SchemaEventTarget):
            self.item_type._set_parent_with_dispatch(parent)

2738 

2739 

class REAL(Float):

    """The SQL REAL type.

    Behavior is inherited from :class:`.Float`; this subclass exists so the
    type is identified to the compiler under its own ``__visit_name__``.
    """

    __visit_name__ = "REAL"

2745 

2746 

class FLOAT(Float):

    """The SQL FLOAT type.

    Behavior is inherited from :class:`.Float`; this subclass exists so the
    type is identified to the compiler under its own ``__visit_name__``.
    """

    __visit_name__ = "FLOAT"

2752 

2753 

class NUMERIC(Numeric):

    """The SQL NUMERIC type.

    Behavior is inherited from :class:`.Numeric`; this subclass exists so the
    type is identified to the compiler under its own ``__visit_name__``.
    """

    __visit_name__ = "NUMERIC"

2759 

2760 

class DECIMAL(Numeric):

    """The SQL DECIMAL type.

    Behavior is inherited from :class:`.Numeric`; this subclass exists so the
    type is identified to the compiler under its own ``__visit_name__``.
    """

    __visit_name__ = "DECIMAL"

2766 

2767 

class INTEGER(Integer):

    """The SQL INT or INTEGER type.

    Behavior is inherited from :class:`.Integer`; this subclass exists so the
    type is identified to the compiler under its own ``__visit_name__``.
    """

    __visit_name__ = "INTEGER"

2773 

2774 

# ``INT`` is a synonym for :class:`.INTEGER`, mirroring the SQL keyword.
INT = INTEGER

2776 

2777 

class SMALLINT(SmallInteger):

    """The SQL SMALLINT type.

    Behavior is inherited from :class:`.SmallInteger`; this subclass exists
    so the type is identified to the compiler under its own
    ``__visit_name__``.
    """

    __visit_name__ = "SMALLINT"

2783 

2784 

class BIGINT(BigInteger):

    """The SQL BIGINT type.

    Behavior is inherited from :class:`.BigInteger`; this subclass exists so
    the type is identified to the compiler under its own ``__visit_name__``.
    """

    __visit_name__ = "BIGINT"

2790 

2791 

class TIMESTAMP(DateTime):

    """The SQL TIMESTAMP type.

    :class:`_types.TIMESTAMP` datatypes have support for timezone
    storage on some backends, such as PostgreSQL and Oracle.  Use the
    :paramref:`~types.TIMESTAMP.timezone` argument in order to enable
    "TIMESTAMP WITH TIMEZONE" for these backends.

    """

    __visit_name__ = "TIMESTAMP"

    def __init__(self, timezone=False):
        """Construct a new :class:`_types.TIMESTAMP`.

        :param timezone: boolean.  Indicates that the TIMESTAMP type should
         enable timezone support, if available on the target database.
         On a per-dialect basis is similar to "TIMESTAMP WITH TIMEZONE".
         If the target database does not support timezones, this flag is
         ignored.


        """
        # delegate entirely to DateTime; only the rendered name differs
        super(TIMESTAMP, self).__init__(timezone=timezone)

    def get_dbapi_type(self, dbapi):
        # report the DBAPI's own TIMESTAMP marker type
        return dbapi.TIMESTAMP

2820 

2821 

class DATETIME(DateTime):

    """The SQL DATETIME type.

    Behavior is inherited from :class:`.DateTime`; this subclass exists so
    the type is identified to the compiler under its own ``__visit_name__``.
    """

    __visit_name__ = "DATETIME"

2827 

2828 

class DATE(Date):

    """The SQL DATE type.

    Behavior is inherited from :class:`.Date`; this subclass exists so the
    type is identified to the compiler under its own ``__visit_name__``.
    """

    __visit_name__ = "DATE"

2834 

2835 

class TIME(Time):

    """The SQL TIME type.

    Behavior is inherited from :class:`.Time`; this subclass exists so the
    type is identified to the compiler under its own ``__visit_name__``.
    """

    __visit_name__ = "TIME"

2841 

2842 

class TEXT(Text):

    """The SQL TEXT type.

    Behavior is inherited from :class:`.Text`; this subclass exists so the
    type is identified to the compiler under its own ``__visit_name__``.
    """

    __visit_name__ = "TEXT"

2848 

2849 

class CLOB(Text):

    """The CLOB type.

    This type is found in Oracle and Informix.  Behavior is inherited from
    :class:`.Text`; this subclass exists so the type is identified to the
    compiler under its own ``__visit_name__``.
    """

    __visit_name__ = "CLOB"

2858 

2859 

class VARCHAR(String):

    """The SQL VARCHAR type.

    Behavior is inherited from :class:`.String`; this subclass exists so the
    type is identified to the compiler under its own ``__visit_name__``.
    """

    __visit_name__ = "VARCHAR"

2865 

2866 

class NVARCHAR(Unicode):

    """The SQL NVARCHAR type.

    Behavior is inherited from :class:`.Unicode`; this subclass exists so the
    type is identified to the compiler under its own ``__visit_name__``.
    """

    __visit_name__ = "NVARCHAR"

2872 

2873 

class CHAR(String):

    """The SQL CHAR type.

    Behavior is inherited from :class:`.String`; this subclass exists so the
    type is identified to the compiler under its own ``__visit_name__``.
    """

    __visit_name__ = "CHAR"

2879 

2880 

class NCHAR(Unicode):

    """The SQL NCHAR type.

    Behavior is inherited from :class:`.Unicode`; this subclass exists so the
    type is identified to the compiler under its own ``__visit_name__``.
    """

    __visit_name__ = "NCHAR"

2886 

2887 

class BLOB(LargeBinary):

    """The SQL BLOB type.

    Behavior is inherited from :class:`.LargeBinary`; this subclass exists so
    the type is identified to the compiler under its own ``__visit_name__``.
    """

    __visit_name__ = "BLOB"

2893 

2894 

class BINARY(_Binary):

    """The SQL BINARY type.

    Behavior is inherited from ``_Binary``; this subclass exists so the
    type is identified to the compiler under its own ``__visit_name__``.
    """

    __visit_name__ = "BINARY"

2900 

2901 

class VARBINARY(_Binary):

    """The SQL VARBINARY type.

    Behavior is inherited from ``_Binary``; this subclass exists so the
    type is identified to the compiler under its own ``__visit_name__``.
    """

    __visit_name__ = "VARBINARY"

2907 

2908 

class BOOLEAN(Boolean):

    """The SQL BOOLEAN type.

    Behavior is inherited from :class:`.Boolean`; this subclass exists so the
    type is identified to the compiler under its own ``__visit_name__``.
    """

    __visit_name__ = "BOOLEAN"

2914 

2915 

class NullType(TypeEngine):

    """An unknown type.

    :class:`.NullType` is used as a default type for those cases where
    a type cannot be determined, including:

    * During table reflection, when the type of a column is not recognized
      by the :class:`.Dialect`
    * When constructing SQL expressions using plain Python objects of
      unknown types (e.g. ``somecolumn == my_special_object``)
    * When a new :class:`_schema.Column` is created,
      and the given type is passed
      as ``None`` or is not passed at all.

    The :class:`.NullType` can be used within SQL expression invocation
    without issue, it just has no behavior either at the expression
    construction level or at the bind-parameter/result processing level.
    :class:`.NullType` will result in a :exc:`.CompileError` if the compiler
    is asked to render the type itself, such as if it is used in a
    :func:`.cast` operation or within a schema creation operation such as that
    invoked by :meth:`_schema.MetaData.create_all` or the
    :class:`.CreateTable`
    construct.

    """

    __visit_name__ = "null"

    _isnull = True

    hashable = False

    def literal_processor(self, dialect):
        # render inline as the SQL NULL keyword, whatever the value
        return lambda value: "NULL"

    class Comparator(TypeEngine.Comparator):
        def _adapt_expression(self, op, other_comparator):
            # When the operands may commute and the other side actually
            # knows its type, let that side determine the expression type;
            # otherwise fall back to our own (unknown) type.
            if operators.is_commutative(op) and not isinstance(
                other_comparator, NullType.Comparator
            ):
                return other_comparator._adapt_expression(op, self)
            return op, self.expr.type

    comparator_factory = Comparator

2965 

2966 

class MatchType(Boolean):
    """Refers to the return type of the MATCH operator.

    As the :meth:`.ColumnOperators.match` operator is probably the most
    open-ended operator in generic SQLAlchemy Core, we can't assume the
    return type at SQL evaluation time, as MySQL returns a floating
    point, not a boolean, and other backends might do something
    different.  So this type acts as a placeholder, currently
    subclassing :class:`.Boolean`.  The type allows dialects to inject
    result-processing functionality if needed, and on MySQL will return
    floating-point values.

    .. versionadded:: 1.0.0

    """

2981 

2982 

# Shared singleton instances of commonly-referenced types; these are
# reused module-wide rather than constructed per use.
NULLTYPE = NullType()
BOOLEANTYPE = Boolean()
STRINGTYPE = String()
INTEGERTYPE = Integer()
MATCHTYPE = MatchType()

# Mapping of plain Python types to default SQL type instances; consulted
# by _resolve_value_to_type() when coercing Python values into SQL
# expressions.
_type_map = {
    int: Integer(),
    float: Float(),
    bool: BOOLEANTYPE,
    decimal.Decimal: Numeric(),
    dt.date: Date(),
    dt.datetime: DateTime(),
    dt.time: Time(),
    dt.timedelta: Interval(),
    util.NoneType: NULLTYPE,
}

if util.py3k:
    # Python 3: bytes maps to LargeBinary, str (always unicode) to Unicode.
    _type_map[bytes] = LargeBinary()  # noqa
    _type_map[str] = Unicode()
else:
    # Python 2: unicode maps to Unicode, str (a bytestring) to String.
    _type_map[unicode] = Unicode()  # noqa
    _type_map[str] = String()

# Bound-method reference hoisted once so _resolve_value_to_type() avoids
# an attribute lookup per call.
_type_map_get = _type_map.get

3009 

3010 

def _resolve_value_to_type(value):
    """Return a type instance appropriate for a plain Python value.

    Looks up ``type(value)`` in ``_type_map``.  Unmapped types fall back
    to :data:`.NULLTYPE`, except that SQLAlchemy's own inspectable
    objects are rejected outright, since passing one of those as a
    literal value is almost certainly an error.

    :param value: an arbitrary Python value.
    :raises: :exc:`.ArgumentError` if ``value`` is a SQLAlchemy
     inspectable object rather than a plain value.
    """
    found = _type_map_get(type(value), False)
    if found is not False:
        return found

    # Not in the map: use inspect() to detect SQLAlchemy built-in
    # objects.  Foil mock.Mock() and other impostors by ensuring the
    # inspection target itself self-inspects.
    insp = inspection.inspect(value, False)
    if insp is not None and insp.__class__ in inspection._registrars:
        raise exc.ArgumentError(
            "Object %r is not legal as a SQL literal value" % value
        )
    return NULLTYPE

3030 

3031 

# back-assign to type_api
# NOTE(review): these module-level names on type_api are populated here
# rather than in type_api itself — presumably to break an import cycle
# between type_api and this module; confirm before relocating.
type_api.BOOLEANTYPE = BOOLEANTYPE
type_api.STRINGTYPE = STRINGTYPE
type_api.INTEGERTYPE = INTEGERTYPE
type_api.NULLTYPE = NULLTYPE
type_api.MATCHTYPE = MATCHTYPE
type_api.INDEXABLE = Indexable
type_api._resolve_value_to_type = _resolve_value_to_type
# Give comparators direct access to the shared Boolean singleton.
TypeEngine.Comparator.BOOLEANTYPE = BOOLEANTYPE