You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

3294 lines
108KB

  1. # sql/sqltypes.py
  2. # Copyright (C) 2005-2021 the SQLAlchemy authors and contributors
  3. # <see AUTHORS file>
  4. #
  5. # This module is part of SQLAlchemy and is released under
  6. # the MIT License: http://www.opensource.org/licenses/mit-license.php
  7. """SQL specific types.
  8. """
  9. import codecs
  10. import datetime as dt
  11. import decimal
  12. import json
  13. from . import coercions
  14. from . import elements
  15. from . import operators
  16. from . import roles
  17. from . import type_api
  18. from .base import _bind_or_error
  19. from .base import NO_ARG
  20. from .base import SchemaEventTarget
  21. from .elements import _NONE_NAME
  22. from .elements import quoted_name
  23. from .elements import Slice
  24. from .elements import TypeCoerce as type_coerce # noqa
  25. from .traversals import HasCacheKey
  26. from .traversals import InternalTraversal
  27. from .type_api import Emulated
  28. from .type_api import NativeForEmulated # noqa
  29. from .type_api import to_instance
  30. from .type_api import TypeDecorator
  31. from .type_api import TypeEngine
  32. from .type_api import Variant
  33. from .. import event
  34. from .. import exc
  35. from .. import inspection
  36. from .. import processors
  37. from .. import util
  38. from ..util import compat
  39. from ..util import langhelpers
  40. from ..util import OrderedDict
  41. from ..util import pickle
  42. class _LookupExpressionAdapter(object):
  43. """Mixin expression adaptations based on lookup tables.
  44. These rules are currently used by the numeric, integer and date types
  45. which have detailed cross-expression coercion rules.
  46. """
  47. @property
  48. def _expression_adaptations(self):
  49. raise NotImplementedError()
  50. class Comparator(TypeEngine.Comparator):
  51. _blank_dict = util.immutabledict()
  52. def _adapt_expression(self, op, other_comparator):
  53. othertype = other_comparator.type._type_affinity
  54. lookup = self.type._expression_adaptations.get(
  55. op, self._blank_dict
  56. ).get(othertype, self.type)
  57. if lookup is othertype:
  58. return (op, other_comparator.type)
  59. elif lookup is self.type._type_affinity:
  60. return (op, self.type)
  61. else:
  62. return (op, to_instance(lookup))
  63. comparator_factory = Comparator
  64. class Concatenable(object):
  65. """A mixin that marks a type as supporting 'concatenation',
  66. typically strings."""
  67. class Comparator(TypeEngine.Comparator):
  68. def _adapt_expression(self, op, other_comparator):
  69. if op is operators.add and isinstance(
  70. other_comparator,
  71. (Concatenable.Comparator, NullType.Comparator),
  72. ):
  73. return operators.concat_op, self.expr.type
  74. else:
  75. return super(Concatenable.Comparator, self)._adapt_expression(
  76. op, other_comparator
  77. )
  78. comparator_factory = Comparator
  79. class Indexable(object):
  80. """A mixin that marks a type as supporting indexing operations,
  81. such as array or JSON structures.
  82. .. versionadded:: 1.1.0
  83. """
  84. class Comparator(TypeEngine.Comparator):
  85. def _setup_getitem(self, index):
  86. raise NotImplementedError()
  87. def __getitem__(self, index):
  88. (
  89. adjusted_op,
  90. adjusted_right_expr,
  91. result_type,
  92. ) = self._setup_getitem(index)
  93. return self.operate(
  94. adjusted_op, adjusted_right_expr, result_type=result_type
  95. )
  96. comparator_factory = Comparator
  97. class String(Concatenable, TypeEngine):
  98. """The base for all string and character types.
  99. In SQL, corresponds to VARCHAR. Can also take Python unicode objects
  100. and encode to the database's encoding in bind params (and the reverse for
  101. result sets.)
  102. The `length` field is usually required when the `String` type is
  103. used within a CREATE TABLE statement, as VARCHAR requires a length
  104. on most databases.
  105. """
  106. __visit_name__ = "string"
  107. RETURNS_UNICODE = util.symbol(
  108. "RETURNS_UNICODE",
  109. """Indicates that the DBAPI returns Python Unicode for VARCHAR,
  110. NVARCHAR, and other character-based datatypes in all cases.
  111. This is the default value for
  112. :attr:`.DefaultDialect.returns_unicode_strings` under Python 3.
  113. .. versionadded:: 1.4
  114. """,
  115. )
  116. RETURNS_BYTES = util.symbol(
  117. "RETURNS_BYTES",
  118. """Indicates that the DBAPI returns byte objects under Python 3
  119. or non-Unicode string objects under Python 2 for VARCHAR, NVARCHAR,
  120. and other character-based datatypes in all cases.
  121. This may be applied to the
  122. :attr:`.DefaultDialect.returns_unicode_strings` attribute.
  123. .. versionadded:: 1.4
  124. """,
  125. )
  126. RETURNS_CONDITIONAL = util.symbol(
  127. "RETURNS_CONDITIONAL",
  128. """Indicates that the DBAPI may return Unicode or bytestrings for
  129. VARCHAR, NVARCHAR, and other character-based datatypes, and that
  130. SQLAlchemy's default String datatype will need to test on a per-row
  131. basis for Unicode or bytes.
  132. This may be applied to the
  133. :attr:`.DefaultDialect.returns_unicode_strings` attribute.
  134. .. versionadded:: 1.4
  135. """,
  136. )
  137. RETURNS_UNKNOWN = util.symbol(
  138. "RETURNS_UNKNOWN",
  139. """Indicates that the dialect should test on first connect what the
  140. string-returning behavior of character-based datatypes is.
  141. This is the default value for DefaultDialect.unicode_returns under
  142. Python 2.
  143. This may be applied to the
  144. :attr:`.DefaultDialect.returns_unicode_strings` attribute under
  145. Python 2 only. The value is disallowed under Python 3.
  146. .. versionadded:: 1.4
  147. .. deprecated:: 1.4 This value will be removed in SQLAlchemy 2.0.
  148. """,
  149. )
  150. @util.deprecated_params(
  151. convert_unicode=(
  152. "1.3",
  153. "The :paramref:`.String.convert_unicode` parameter is deprecated "
  154. "and will be removed in a future release. All modern DBAPIs "
  155. "now support Python Unicode directly and this parameter is "
  156. "unnecessary.",
  157. ),
  158. unicode_error=(
  159. "1.3",
  160. "The :paramref:`.String.unicode_errors` parameter is deprecated "
  161. "and will be removed in a future release. This parameter is "
  162. "unnecessary for modern Python DBAPIs and degrades performance "
  163. "significantly.",
  164. ),
  165. )
  166. def __init__(
  167. self,
  168. length=None,
  169. collation=None,
  170. convert_unicode=False,
  171. unicode_error=None,
  172. _warn_on_bytestring=False,
  173. _expect_unicode=False,
  174. ):
  175. """
  176. Create a string-holding type.
  177. :param length: optional, a length for the column for use in
  178. DDL and CAST expressions. May be safely omitted if no ``CREATE
  179. TABLE`` will be issued. Certain databases may require a
  180. ``length`` for use in DDL, and will raise an exception when
  181. the ``CREATE TABLE`` DDL is issued if a ``VARCHAR``
  182. with no length is included. Whether the value is
  183. interpreted as bytes or characters is database specific.
  184. :param collation: Optional, a column-level collation for
  185. use in DDL and CAST expressions. Renders using the
  186. COLLATE keyword supported by SQLite, MySQL, and PostgreSQL.
  187. E.g.::
  188. >>> from sqlalchemy import cast, select, String
  189. >>> print(select(cast('some string', String(collation='utf8'))))
  190. SELECT CAST(:param_1 AS VARCHAR COLLATE utf8) AS anon_1
  191. :param convert_unicode: When set to ``True``, the
  192. :class:`.String` type will assume that
  193. input is to be passed as Python Unicode objects under Python 2,
  194. and results returned as Python Unicode objects.
  195. In the rare circumstance that the DBAPI does not support
  196. Python unicode under Python 2, SQLAlchemy will use its own
  197. encoder/decoder functionality on strings, referring to the
  198. value of the :paramref:`_sa.create_engine.encoding` parameter
  199. parameter passed to :func:`_sa.create_engine` as the encoding.
  200. For the extremely rare case that Python Unicode
  201. is to be encoded/decoded by SQLAlchemy on a backend
  202. that *does* natively support Python Unicode,
  203. the string value ``"force"`` can be passed here which will
  204. cause SQLAlchemy's encode/decode services to be
  205. used unconditionally.
  206. .. note::
  207. SQLAlchemy's unicode-conversion flags and features only apply
  208. to Python 2; in Python 3, all string objects are Unicode objects.
  209. For this reason, as well as the fact that virtually all modern
  210. DBAPIs now support Unicode natively even under Python 2,
  211. the :paramref:`.String.convert_unicode` flag is inherently a
  212. legacy feature.
  213. .. note::
  214. In the vast majority of cases, the :class:`.Unicode` or
  215. :class:`.UnicodeText` datatypes should be used for a
  216. :class:`_schema.Column` that expects to store non-ascii data.
  217. These
  218. datatypes will ensure that the correct types are used on the
  219. database side as well as set up the correct Unicode behaviors
  220. under Python 2.
  221. .. seealso::
  222. :paramref:`_sa.create_engine.convert_unicode` -
  223. :class:`_engine.Engine`-wide parameter
  224. :param unicode_error: Optional, a method to use to handle Unicode
  225. conversion errors. Behaves like the ``errors`` keyword argument to
  226. the standard library's ``string.decode()`` functions, requires
  227. that :paramref:`.String.convert_unicode` is set to
  228. ``"force"``
  229. """
  230. if unicode_error is not None and convert_unicode != "force":
  231. raise exc.ArgumentError(
  232. "convert_unicode must be 'force' " "when unicode_error is set."
  233. )
  234. self.length = length
  235. self.collation = collation
  236. self._expect_unicode = convert_unicode or _expect_unicode
  237. self._expect_unicode_error = unicode_error
  238. self._warn_on_bytestring = _warn_on_bytestring
  239. def literal_processor(self, dialect):
  240. def process(value):
  241. value = value.replace("'", "''")
  242. if dialect.identifier_preparer._double_percents:
  243. value = value.replace("%", "%%")
  244. return "'%s'" % value
  245. return process
  246. def bind_processor(self, dialect):
  247. if self._expect_unicode or dialect.convert_unicode:
  248. if (
  249. dialect.supports_unicode_binds
  250. and self._expect_unicode != "force"
  251. ):
  252. if self._warn_on_bytestring:
  253. def process(value):
  254. if isinstance(value, util.binary_type):
  255. util.warn_limited(
  256. "Unicode type received non-unicode "
  257. "bind param value %r.",
  258. (util.ellipses_string(value),),
  259. )
  260. return value
  261. return process
  262. else:
  263. return None
  264. else:
  265. encoder = codecs.getencoder(dialect.encoding)
  266. warn_on_bytestring = self._warn_on_bytestring
  267. def process(value):
  268. if isinstance(value, util.text_type):
  269. return encoder(value, self._expect_unicode_error)[0]
  270. elif warn_on_bytestring and value is not None:
  271. util.warn_limited(
  272. "Unicode type received non-unicode bind "
  273. "param value %r.",
  274. (util.ellipses_string(value),),
  275. )
  276. return value
  277. return process
  278. else:
  279. return None
  280. def result_processor(self, dialect, coltype):
  281. wants_unicode = self._expect_unicode or dialect.convert_unicode
  282. needs_convert = wants_unicode and (
  283. dialect.returns_unicode_strings is not String.RETURNS_UNICODE
  284. or self._expect_unicode in ("force", "force_nocheck")
  285. )
  286. needs_isinstance = (
  287. needs_convert
  288. and dialect.returns_unicode_strings
  289. in (
  290. String.RETURNS_CONDITIONAL,
  291. String.RETURNS_UNICODE,
  292. )
  293. and self._expect_unicode != "force_nocheck"
  294. )
  295. if needs_convert:
  296. if needs_isinstance:
  297. return processors.to_conditional_unicode_processor_factory(
  298. dialect.encoding, self._expect_unicode_error
  299. )
  300. else:
  301. return processors.to_unicode_processor_factory(
  302. dialect.encoding, self._expect_unicode_error
  303. )
  304. else:
  305. return None
  306. @property
  307. def python_type(self):
  308. if self._expect_unicode:
  309. return util.text_type
  310. else:
  311. return str
  312. def get_dbapi_type(self, dbapi):
  313. return dbapi.STRING
  314. @classmethod
  315. def _warn_deprecated_unicode(cls):
  316. util.warn_deprecated(
  317. "The convert_unicode on Engine and String as well as the "
  318. "unicode_error flag on String are deprecated. All modern "
  319. "DBAPIs now support Python Unicode natively under Python 2, and "
  320. "under Python 3 all strings are inherently Unicode. These flags "
  321. "will be removed in a future release.",
  322. version="1.3",
  323. )
  324. class Text(String):
  325. """A variably sized string type.
  326. In SQL, usually corresponds to CLOB or TEXT. Can also take Python
  327. unicode objects and encode to the database's encoding in bind
  328. params (and the reverse for result sets.) In general, TEXT objects
  329. do not have a length; while some databases will accept a length
  330. argument here, it will be rejected by others.
  331. """
  332. __visit_name__ = "text"
  333. class Unicode(String):
  334. """A variable length Unicode string type.
  335. The :class:`.Unicode` type is a :class:`.String` subclass that assumes
  336. input and output strings that may contain non-ASCII characters, and for
  337. some backends implies an underlying column type that is explicitly
  338. supporting of non-ASCII data, such as ``NVARCHAR`` on Oracle and SQL
  339. Server. This will impact the output of ``CREATE TABLE`` statements and
  340. ``CAST`` functions at the dialect level, and also in some cases will
  341. indicate different behavior in the DBAPI itself in how it handles bound
  342. parameters.
  343. The character encoding used by the :class:`.Unicode` type that is used to
  344. transmit and receive data to the database is usually determined by the
  345. DBAPI itself. All modern DBAPIs accommodate non-ASCII strings but may have
  346. different methods of managing database encodings; if necessary, this
  347. encoding should be configured as detailed in the notes for the target DBAPI
  348. in the :ref:`dialect_toplevel` section.
  349. In modern SQLAlchemy, use of the :class:`.Unicode` datatype does not
  350. typically imply any encoding/decoding behavior within SQLAlchemy itself.
  351. Historically, when DBAPIs did not support Python ``unicode`` objects under
  352. Python 2, SQLAlchemy handled unicode encoding/decoding services itself
  353. which would be controlled by the flag :paramref:`.String.convert_unicode`;
  354. this flag is deprecated as it is no longer needed for Python 3.
  355. When using Python 2, data that is passed to columns that use the
  356. :class:`.Unicode` datatype must be of type ``unicode``, and not ``str``
  357. which in Python 2 is equivalent to ``bytes``. In Python 3, all data
  358. passed to columns that use the :class:`.Unicode` datatype should be
  359. of type ``str``. See the flag :paramref:`.String.convert_unicode` for
  360. more discussion of unicode encode/decode behavior under Python 2.
  361. .. warning:: Some database backends, particularly SQL Server with pyodbc,
  362. are known to have undesirable behaviors regarding data that is noted
  363. as being of ``NVARCHAR`` type as opposed to ``VARCHAR``, including
  364. datatype mismatch errors and non-use of indexes. See the section
  365. on :meth:`.DialectEvents.do_setinputsizes` for background on working
  366. around unicode character issues for backends like SQL Server with
  367. pyodbc as well as cx_Oracle.
  368. .. seealso::
  369. :class:`.UnicodeText` - unlengthed textual counterpart
  370. to :class:`.Unicode`.
  371. :paramref:`.String.convert_unicode`
  372. :meth:`.DialectEvents.do_setinputsizes`
  373. """
  374. __visit_name__ = "unicode"
  375. def __init__(self, length=None, **kwargs):
  376. """
  377. Create a :class:`.Unicode` object.
  378. Parameters are the same as that of :class:`.String`,
  379. with the exception that ``convert_unicode``
  380. defaults to ``True``.
  381. """
  382. kwargs.setdefault("_expect_unicode", True)
  383. kwargs.setdefault("_warn_on_bytestring", True)
  384. super(Unicode, self).__init__(length=length, **kwargs)
  385. class UnicodeText(Text):
  386. """An unbounded-length Unicode string type.
  387. See :class:`.Unicode` for details on the unicode
  388. behavior of this object.
  389. Like :class:`.Unicode`, usage the :class:`.UnicodeText` type implies a
  390. unicode-capable type being used on the backend, such as
  391. ``NCLOB``, ``NTEXT``.
  392. """
  393. __visit_name__ = "unicode_text"
  394. def __init__(self, length=None, **kwargs):
  395. """
  396. Create a Unicode-converting Text type.
  397. Parameters are the same as that of :class:`_expression.TextClause`,
  398. with the exception that ``convert_unicode``
  399. defaults to ``True``.
  400. """
  401. kwargs.setdefault("_expect_unicode", True)
  402. kwargs.setdefault("_warn_on_bytestring", True)
  403. super(UnicodeText, self).__init__(length=length, **kwargs)
  404. def _warn_deprecated_unicode(self):
  405. pass
  406. class Integer(_LookupExpressionAdapter, TypeEngine):
  407. """A type for ``int`` integers."""
  408. __visit_name__ = "integer"
  409. def get_dbapi_type(self, dbapi):
  410. return dbapi.NUMBER
  411. @property
  412. def python_type(self):
  413. return int
  414. def literal_processor(self, dialect):
  415. def process(value):
  416. return str(int(value))
  417. return process
  418. @util.memoized_property
  419. def _expression_adaptations(self):
  420. # TODO: need a dictionary object that will
  421. # handle operators generically here, this is incomplete
  422. return {
  423. operators.add: {
  424. Date: Date,
  425. Integer: self.__class__,
  426. Numeric: Numeric,
  427. },
  428. operators.mul: {
  429. Interval: Interval,
  430. Integer: self.__class__,
  431. Numeric: Numeric,
  432. },
  433. operators.div: {Integer: self.__class__, Numeric: Numeric},
  434. operators.truediv: {Integer: self.__class__, Numeric: Numeric},
  435. operators.sub: {Integer: self.__class__, Numeric: Numeric},
  436. }
  437. class SmallInteger(Integer):
  438. """A type for smaller ``int`` integers.
  439. Typically generates a ``SMALLINT`` in DDL, and otherwise acts like
  440. a normal :class:`.Integer` on the Python side.
  441. """
  442. __visit_name__ = "small_integer"
  443. class BigInteger(Integer):
  444. """A type for bigger ``int`` integers.
  445. Typically generates a ``BIGINT`` in DDL, and otherwise acts like
  446. a normal :class:`.Integer` on the Python side.
  447. """
  448. __visit_name__ = "big_integer"
  449. class Numeric(_LookupExpressionAdapter, TypeEngine):
  450. """A type for fixed precision numbers, such as ``NUMERIC`` or ``DECIMAL``.
  451. This type returns Python ``decimal.Decimal`` objects by default, unless
  452. the :paramref:`.Numeric.asdecimal` flag is set to False, in which case
  453. they are coerced to Python ``float`` objects.
  454. .. note::
  455. The :class:`.Numeric` type is designed to receive data from a database
  456. type that is explicitly known to be a decimal type
  457. (e.g. ``DECIMAL``, ``NUMERIC``, others) and not a floating point
  458. type (e.g. ``FLOAT``, ``REAL``, others).
  459. If the database column on the server is in fact a floating-point
  460. type, such as ``FLOAT`` or ``REAL``, use the :class:`.Float`
  461. type or a subclass, otherwise numeric coercion between
  462. ``float``/``Decimal`` may or may not function as expected.
  463. .. note::
  464. The Python ``decimal.Decimal`` class is generally slow
  465. performing; cPython 3.3 has now switched to use the `cdecimal
  466. <http://pypi.python.org/pypi/cdecimal/>`_ library natively. For
  467. older Python versions, the ``cdecimal`` library can be patched
  468. into any application where it will replace the ``decimal``
  469. library fully, however this needs to be applied globally and
  470. before any other modules have been imported, as follows::
  471. import sys
  472. import cdecimal
  473. sys.modules["decimal"] = cdecimal
  474. Note that the ``cdecimal`` and ``decimal`` libraries are **not
  475. compatible with each other**, so patching ``cdecimal`` at the
  476. global level is the only way it can be used effectively with
  477. various DBAPIs that hardcode to import the ``decimal`` library.
  478. """
  479. __visit_name__ = "numeric"
  480. _default_decimal_return_scale = 10
  481. def __init__(
  482. self,
  483. precision=None,
  484. scale=None,
  485. decimal_return_scale=None,
  486. asdecimal=True,
  487. ):
  488. """
  489. Construct a Numeric.
  490. :param precision: the numeric precision for use in DDL ``CREATE
  491. TABLE``.
  492. :param scale: the numeric scale for use in DDL ``CREATE TABLE``.
  493. :param asdecimal: default True. Return whether or not
  494. values should be sent as Python Decimal objects, or
  495. as floats. Different DBAPIs send one or the other based on
  496. datatypes - the Numeric type will ensure that return values
  497. are one or the other across DBAPIs consistently.
  498. :param decimal_return_scale: Default scale to use when converting
  499. from floats to Python decimals. Floating point values will typically
  500. be much longer due to decimal inaccuracy, and most floating point
  501. database types don't have a notion of "scale", so by default the
  502. float type looks for the first ten decimal places when converting.
  503. Specifying this value will override that length. Types which
  504. do include an explicit ".scale" value, such as the base
  505. :class:`.Numeric` as well as the MySQL float types, will use the
  506. value of ".scale" as the default for decimal_return_scale, if not
  507. otherwise specified.
  508. .. versionadded:: 0.9.0
  509. When using the ``Numeric`` type, care should be taken to ensure
  510. that the asdecimal setting is appropriate for the DBAPI in use -
  511. when Numeric applies a conversion from Decimal->float or float->
  512. Decimal, this conversion incurs an additional performance overhead
  513. for all result columns received.
  514. DBAPIs that return Decimal natively (e.g. psycopg2) will have
  515. better accuracy and higher performance with a setting of ``True``,
  516. as the native translation to Decimal reduces the amount of floating-
  517. point issues at play, and the Numeric type itself doesn't need
  518. to apply any further conversions. However, another DBAPI which
  519. returns floats natively *will* incur an additional conversion
  520. overhead, and is still subject to floating point data loss - in
  521. which case ``asdecimal=False`` will at least remove the extra
  522. conversion overhead.
  523. """
  524. self.precision = precision
  525. self.scale = scale
  526. self.decimal_return_scale = decimal_return_scale
  527. self.asdecimal = asdecimal
  528. @property
  529. def _effective_decimal_return_scale(self):
  530. if self.decimal_return_scale is not None:
  531. return self.decimal_return_scale
  532. elif getattr(self, "scale", None) is not None:
  533. return self.scale
  534. else:
  535. return self._default_decimal_return_scale
  536. def get_dbapi_type(self, dbapi):
  537. return dbapi.NUMBER
  538. def literal_processor(self, dialect):
  539. def process(value):
  540. return str(value)
  541. return process
  542. @property
  543. def python_type(self):
  544. if self.asdecimal:
  545. return decimal.Decimal
  546. else:
  547. return float
  548. def bind_processor(self, dialect):
  549. if dialect.supports_native_decimal:
  550. return None
  551. else:
  552. return processors.to_float
  553. def result_processor(self, dialect, coltype):
  554. if self.asdecimal:
  555. if dialect.supports_native_decimal:
  556. # we're a "numeric", DBAPI will give us Decimal directly
  557. return None
  558. else:
  559. util.warn(
  560. "Dialect %s+%s does *not* support Decimal "
  561. "objects natively, and SQLAlchemy must "
  562. "convert from floating point - rounding "
  563. "errors and other issues may occur. Please "
  564. "consider storing Decimal numbers as strings "
  565. "or integers on this platform for lossless "
  566. "storage." % (dialect.name, dialect.driver)
  567. )
  568. # we're a "numeric", DBAPI returns floats, convert.
  569. return processors.to_decimal_processor_factory(
  570. decimal.Decimal,
  571. self.scale
  572. if self.scale is not None
  573. else self._default_decimal_return_scale,
  574. )
  575. else:
  576. if dialect.supports_native_decimal:
  577. return processors.to_float
  578. else:
  579. return None
  580. @util.memoized_property
  581. def _expression_adaptations(self):
  582. return {
  583. operators.mul: {
  584. Interval: Interval,
  585. Numeric: self.__class__,
  586. Integer: self.__class__,
  587. },
  588. operators.div: {Numeric: self.__class__, Integer: self.__class__},
  589. operators.truediv: {
  590. Numeric: self.__class__,
  591. Integer: self.__class__,
  592. },
  593. operators.add: {Numeric: self.__class__, Integer: self.__class__},
  594. operators.sub: {Numeric: self.__class__, Integer: self.__class__},
  595. }
  596. class Float(Numeric):
  597. """Type representing floating point types, such as ``FLOAT`` or ``REAL``.
  598. This type returns Python ``float`` objects by default, unless the
  599. :paramref:`.Float.asdecimal` flag is set to True, in which case they
  600. are coerced to ``decimal.Decimal`` objects.
  601. .. note::
  602. The :class:`.Float` type is designed to receive data from a database
  603. type that is explicitly known to be a floating point type
  604. (e.g. ``FLOAT``, ``REAL``, others)
  605. and not a decimal type (e.g. ``DECIMAL``, ``NUMERIC``, others).
  606. If the database column on the server is in fact a Numeric
  607. type, such as ``DECIMAL`` or ``NUMERIC``, use the :class:`.Numeric`
  608. type or a subclass, otherwise numeric coercion between
  609. ``float``/``Decimal`` may or may not function as expected.
  610. """
  611. __visit_name__ = "float"
  612. scale = None
  613. def __init__(
  614. self, precision=None, asdecimal=False, decimal_return_scale=None
  615. ):
  616. r"""
  617. Construct a Float.
  618. :param precision: the numeric precision for use in DDL ``CREATE
  619. TABLE``.
  620. :param asdecimal: the same flag as that of :class:`.Numeric`, but
  621. defaults to ``False``. Note that setting this flag to ``True``
  622. results in floating point conversion.
  623. :param decimal_return_scale: Default scale to use when converting
  624. from floats to Python decimals. Floating point values will typically
  625. be much longer due to decimal inaccuracy, and most floating point
  626. database types don't have a notion of "scale", so by default the
  627. float type looks for the first ten decimal places when converting.
  628. Specifying this value will override that length. Note that the
  629. MySQL float types, which do include "scale", will use "scale"
  630. as the default for decimal_return_scale, if not otherwise specified.
  631. .. versionadded:: 0.9.0
  632. """
  633. self.precision = precision
  634. self.asdecimal = asdecimal
  635. self.decimal_return_scale = decimal_return_scale
  636. def result_processor(self, dialect, coltype):
  637. if self.asdecimal:
  638. return processors.to_decimal_processor_factory(
  639. decimal.Decimal, self._effective_decimal_return_scale
  640. )
  641. elif dialect.supports_native_decimal:
  642. return processors.to_float
  643. else:
  644. return None
  645. class DateTime(_LookupExpressionAdapter, TypeEngine):
  646. """A type for ``datetime.datetime()`` objects.
  647. Date and time types return objects from the Python ``datetime``
  648. module. Most DBAPIs have built in support for the datetime
  649. module, with the noted exception of SQLite. In the case of
  650. SQLite, date and time types are stored as strings which are then
  651. converted back to datetime objects when rows are returned.
  652. For the time representation within the datetime type, some
  653. backends include additional options, such as timezone support and
  654. fractional seconds support. For fractional seconds, use the
  655. dialect-specific datatype, such as :class:`.mysql.TIME`. For
  656. timezone support, use at least the :class:`_types.TIMESTAMP` datatype,
  657. if not the dialect-specific datatype object.
  658. """
  659. __visit_name__ = "datetime"
  660. def __init__(self, timezone=False):
  661. """Construct a new :class:`.DateTime`.
  662. :param timezone: boolean. Indicates that the datetime type should
  663. enable timezone support, if available on the
  664. **base date/time-holding type only**. It is recommended
  665. to make use of the :class:`_types.TIMESTAMP` datatype directly when
  666. using this flag, as some databases include separate generic
  667. date/time-holding types distinct from the timezone-capable
  668. TIMESTAMP datatype, such as Oracle.
  669. """
  670. self.timezone = timezone
  671. def get_dbapi_type(self, dbapi):
  672. return dbapi.DATETIME
  673. @property
  674. def python_type(self):
  675. return dt.datetime
  676. @util.memoized_property
  677. def _expression_adaptations(self):
  678. # Based on http://www.postgresql.org/docs/current/\
  679. # static/functions-datetime.html.
  680. return {
  681. operators.add: {Interval: self.__class__},
  682. operators.sub: {Interval: self.__class__, DateTime: Interval},
  683. }
  684. class Date(_LookupExpressionAdapter, TypeEngine):
  685. """A type for ``datetime.date()`` objects."""
  686. __visit_name__ = "date"
  687. def get_dbapi_type(self, dbapi):
  688. return dbapi.DATETIME
  689. @property
  690. def python_type(self):
  691. return dt.date
  692. @util.memoized_property
  693. def _expression_adaptations(self):
  694. # Based on http://www.postgresql.org/docs/current/\
  695. # static/functions-datetime.html.
  696. return {
  697. operators.add: {
  698. Integer: self.__class__,
  699. Interval: DateTime,
  700. Time: DateTime,
  701. },
  702. operators.sub: {
  703. # date - integer = date
  704. Integer: self.__class__,
  705. # date - date = integer.
  706. Date: Integer,
  707. Interval: DateTime,
  708. # date - datetime = interval,
  709. # this one is not in the PG docs
  710. # but works
  711. DateTime: Interval,
  712. },
  713. }
  714. class Time(_LookupExpressionAdapter, TypeEngine):
  715. """A type for ``datetime.time()`` objects."""
  716. __visit_name__ = "time"
  717. def __init__(self, timezone=False):
  718. self.timezone = timezone
  719. def get_dbapi_type(self, dbapi):
  720. return dbapi.DATETIME
  721. @property
  722. def python_type(self):
  723. return dt.time
  724. @util.memoized_property
  725. def _expression_adaptations(self):
  726. # Based on http://www.postgresql.org/docs/current/\
  727. # static/functions-datetime.html.
  728. return {
  729. operators.add: {Date: DateTime, Interval: self.__class__},
  730. operators.sub: {Time: Interval, Interval: self.__class__},
  731. }
class _Binary(TypeEngine):
    """Define base behavior for binary types."""

    def __init__(self, length=None):
        # optional length for DDL on binary types that accept one
        self.length = length

    def literal_processor(self, dialect):
        """Return a callable rendering a bytes value as an inline SQL
        string literal, decoding via the dialect-level encoding."""

        def process(value):
            # decode to text, then escape embedded single quotes
            value = value.decode(dialect.encoding).replace("'", "''")
            return "'%s'" % value

        return process

    @property
    def python_type(self):
        return util.binary_type

    # Python 3 - sqlite3 doesn't need the `Binary` conversion
    # here, though pg8000 does to indicate "bytea"
    def bind_processor(self, dialect):
        """Wrap bound values in the DBAPI's ``Binary`` construct,
        when a DBAPI is present; None values pass through unchanged."""
        if dialect.dbapi is None:
            return None

        DBAPIBinary = dialect.dbapi.Binary

        def process(value):
            if value is not None:
                return DBAPIBinary(value)
            else:
                return None

        return process

    # Python 3 has native bytes() type
    # both sqlite3 and pg8000 seem to return it,
    # psycopg2 as of 2.5 returns 'memoryview'
    if util.py2k:

        def result_processor(self, dialect, coltype):
            # py2: normalize result rows to str
            return processors.to_str

    else:

        def result_processor(self, dialect, coltype):
            # py3: normalize result rows (e.g. memoryview) to bytes
            def process(value):
                if value is not None:
                    value = bytes(value)
                return value

            return process

    def coerce_compared_value(self, op, value):
        """See :meth:`.TypeEngine.coerce_compared_value` for a description."""
        # comparing against a plain string keeps the binary type so the
        # bind_processor applies; otherwise defer to the default rules
        if isinstance(value, util.string_types):
            return self
        else:
            return super(_Binary, self).coerce_compared_value(op, value)

    def get_dbapi_type(self, dbapi):
        return dbapi.BINARY
  777. class LargeBinary(_Binary):
  778. """A type for large binary byte data.
  779. The :class:`.LargeBinary` type corresponds to a large and/or unlengthed
  780. binary type for the target platform, such as BLOB on MySQL and BYTEA for
  781. PostgreSQL. It also handles the necessary conversions for the DBAPI.
  782. """
  783. __visit_name__ = "large_binary"
  784. def __init__(self, length=None):
  785. """
  786. Construct a LargeBinary type.
  787. :param length: optional, a length for the column for use in
  788. DDL statements, for those binary types that accept a length,
  789. such as the MySQL BLOB type.
  790. """
  791. _Binary.__init__(self, length=length)
class SchemaType(SchemaEventTarget):

    """Mark a type as possibly requiring schema-level DDL for usage.

    Supports types that must be explicitly created/dropped (i.e. PG ENUM type)
    as well as types that are complemented by table or schema level
    constraints, triggers, and other rules.

    :class:`.SchemaType` classes can also be targets for the
    :meth:`.DDLEvents.before_parent_attach` and
    :meth:`.DDLEvents.after_parent_attach` events, where the events fire off
    surrounding the association of the type object with a parent
    :class:`_schema.Column`.

    .. seealso::

        :class:`.Enum`

        :class:`.Boolean`

    """

    # schema-bearing types take part in schema translate maps
    _use_schema_map = True

    def __init__(
        self,
        name=None,
        schema=None,
        metadata=None,
        inherit_schema=False,
        quote=None,
        _create_events=True,
    ):
        if name is not None:
            self.name = quoted_name(name, quote)
        else:
            self.name = None
        self.schema = schema
        self.metadata = metadata
        self.inherit_schema = inherit_schema
        self._create_events = _create_events

        if _create_events and self.metadata:
            # when associated directly with a MetaData, CREATE/DROP for
            # this type is emitted with metadata-level
            # create_all() / drop_all()
            event.listen(
                self.metadata,
                "before_create",
                util.portable_instancemethod(self._on_metadata_create),
            )
            event.listen(
                self.metadata,
                "after_drop",
                util.portable_instancemethod(self._on_metadata_drop),
            )

    def _set_parent(self, column, **kw):
        # defer table-level event setup until the owning column is
        # attached to a Table
        column._on_table_attach(util.portable_instancemethod(self._set_table))

    def _variant_mapping_for_set_table(self, column):
        # return a copy of the column type's variant mapping with the
        # default impl added under "_default", or None when the column's
        # type is not a Variant
        if isinstance(column.type, Variant):
            variant_mapping = column.type.mapping.copy()
            variant_mapping["_default"] = column.type.impl
        else:
            variant_mapping = None
        return variant_mapping

    def _set_table(self, column, table):
        # resolve the effective schema: the owning table's schema when
        # inherit_schema is set, else the bound MetaData's schema as a
        # default
        if self.inherit_schema:
            self.schema = table.schema
        elif self.metadata and self.schema is None and self.metadata.schema:
            self.schema = self.metadata.schema

        if not self._create_events:
            return

        variant_mapping = self._variant_mapping_for_set_table(column)

        event.listen(
            table,
            "before_create",
            util.portable_instancemethod(
                self._on_table_create, {"variant_mapping": variant_mapping}
            ),
        )
        event.listen(
            table,
            "after_drop",
            util.portable_instancemethod(
                self._on_table_drop, {"variant_mapping": variant_mapping}
            ),
        )
        if self.metadata is None:
            # TODO: what's the difference between self.metadata
            # and table.metadata here ?
            event.listen(
                table.metadata,
                "before_create",
                util.portable_instancemethod(
                    self._on_metadata_create,
                    {"variant_mapping": variant_mapping},
                ),
            )
            event.listen(
                table.metadata,
                "after_drop",
                util.portable_instancemethod(
                    self._on_metadata_drop,
                    {"variant_mapping": variant_mapping},
                ),
            )

    def copy(self, **kw):
        # unlike adapt(), a copy always re-enables DDL events
        return self.adapt(self.__class__, _create_events=True)

    def adapt(self, impltype, **kw):
        """Produce an adapted form of this type as ``impltype``.

        DDL events are not re-established on the adapted copy unless
        ``_create_events`` is passed explicitly.
        """
        schema = kw.pop("schema", self.schema)
        metadata = kw.pop("metadata", self.metadata)
        _create_events = kw.pop("_create_events", False)
        return impltype(
            name=self.name,
            schema=schema,
            inherit_schema=self.inherit_schema,
            metadata=metadata,
            _create_events=_create_events,
            **kw
        )

    @property
    def bind(self):
        # the engine/connection bound to the owning MetaData, if any
        return self.metadata and self.metadata.bind or None

    def create(self, bind=None, checkfirst=False):
        """Issue CREATE DDL for this type, if applicable."""

        if bind is None:
            bind = _bind_or_error(self)
        # delegate to the dialect-specific implementation when it is a
        # distinct SchemaType (e.g. postgresql.ENUM)
        t = self.dialect_impl(bind.dialect)
        if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
            t.create(bind=bind, checkfirst=checkfirst)

    def drop(self, bind=None, checkfirst=False):
        """Issue DROP DDL for this type, if applicable."""

        if bind is None:
            bind = _bind_or_error(self)
        t = self.dialect_impl(bind.dialect)
        if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
            t.drop(bind=bind, checkfirst=checkfirst)

    def _on_table_create(self, target, bind, **kw):
        # skip when this type is not the active impl for the dialect's
        # variant; otherwise delegate to the dialect-specific impl
        if not self._is_impl_for_variant(bind.dialect, kw):
            return

        t = self.dialect_impl(bind.dialect)
        if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
            t._on_table_create(target, bind, **kw)

    def _on_table_drop(self, target, bind, **kw):
        if not self._is_impl_for_variant(bind.dialect, kw):
            return

        t = self.dialect_impl(bind.dialect)
        if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
            t._on_table_drop(target, bind, **kw)

    def _on_metadata_create(self, target, bind, **kw):
        if not self._is_impl_for_variant(bind.dialect, kw):
            return

        t = self.dialect_impl(bind.dialect)
        if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
            t._on_metadata_create(target, bind, **kw)

    def _on_metadata_drop(self, target, bind, **kw):
        if not self._is_impl_for_variant(bind.dialect, kw):
            return

        t = self.dialect_impl(bind.dialect)
        if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
            t._on_metadata_drop(target, bind, **kw)

    def _is_impl_for_variant(self, dialect, kw):
        """Return whether this type is the active impl for the given
        dialect under the variant mapping in ``kw``, popping
        ``variant_mapping`` from ``kw`` as a side effect."""
        variant_mapping = kw.pop("variant_mapping", None)
        if variant_mapping is None:
            return True

        # since PostgreSQL is the only DB that has ARRAY this can only
        # be integration tested by PG-specific tests
        def _we_are_the_impl(typ):
            return (
                typ is self or isinstance(typ, ARRAY) and typ.item_type is self
            )

        if dialect.name in variant_mapping and _we_are_the_impl(
            variant_mapping[dialect.name]
        ):
            return True
        elif dialect.name not in variant_mapping:
            return _we_are_the_impl(variant_mapping["_default"])
        # NOTE(review): when the dialect has a mapped variant but this
        # type is not its impl, control falls through and an implicit
        # (falsy) ``None`` is returned — callers only test truthiness
class Enum(Emulated, String, SchemaType):
    """Generic Enum Type.

    The :class:`.Enum` type provides a set of possible string values
    which the column is constrained towards.

    The :class:`.Enum` type will make use of the backend's native "ENUM"
    type if one is available; otherwise, it uses a VARCHAR datatype.
    An option also exists to automatically produce a CHECK constraint
    when the VARCHAR (so called "non-native") variant is produced;
    see the :paramref:`.Enum.create_constraint` flag.

    The :class:`.Enum` type also provides in-Python validation of string
    values during both read and write operations.  When reading a value
    from the database in a result set, the string value is always checked
    against the list of possible values and a ``LookupError`` is raised
    if no match is found.  When passing a value to the database as a
    plain string within a SQL statement, if the
    :paramref:`.Enum.validate_strings` parameter is
    set to True, a ``LookupError`` is raised for any string value that's
    not located in the given list of possible values; note that this
    impacts usage of LIKE expressions with enumerated values (an unusual
    use case).

    .. versionchanged:: 1.1 the :class:`.Enum` type now provides in-Python
       validation of input values as well as on data being returned by
       the database.

    The source of enumerated values may be a list of string values, or
    alternatively a PEP-435-compliant enumerated class.  For the purposes
    of the :class:`.Enum` datatype, this class need only provide a
    ``__members__`` method.

    When using an enumerated class, the enumerated objects are used
    both for input and output, rather than strings as is the case with
    a plain-string enumerated type::

        import enum

        class MyEnum(enum.Enum):
            one = 1
            two = 2
            three = 3

        t = Table(
            'data', MetaData(),
            Column('value', Enum(MyEnum))
        )

        connection.execute(t.insert(), {"value": MyEnum.two})
        assert connection.scalar(t.select()) is MyEnum.two

    Above, the string names of each element, e.g. "one", "two", "three",
    are persisted to the database; the values of the Python Enum, here
    indicated as integers, are **not** used; the value of each enum can
    therefore be any kind of Python object whether or not it is persistable.

    In order to persist the values and not the names, the
    :paramref:`.Enum.values_callable` parameter may be used.  The value of
    this parameter is a user-supplied callable, which is intended to be used
    with a PEP-435-compliant enumerated class and returns a list of string
    values to be persisted.  For a simple enumeration that uses string values,
    a callable such as ``lambda x: [e.value for e in x]`` is sufficient.

    .. versionadded:: 1.1 - support for PEP-435-style enumerated
       classes.

    .. seealso::

        :class:`_postgresql.ENUM` - PostgreSQL-specific type,
        which has additional functionality.

        :class:`.mysql.ENUM` - MySQL-specific type

    """

    __visit_name__ = "enum"

    @util.deprecated_params(
        convert_unicode=(
            "1.3",
            "The :paramref:`.Enum.convert_unicode` parameter is deprecated "
            "and will be removed in a future release. All modern DBAPIs "
            "now support Python Unicode directly and this parameter is "
            "unnecessary.",
        )
    )
    def __init__(self, *enums, **kw):
        r"""Construct an enum.

        Keyword arguments which don't apply to a specific backend are ignored
        by that backend.

        :param \*enums: either exactly one PEP-435 compliant enumerated type
           or one or more string labels.

           .. versionadded:: 1.1 a PEP-435 style enumerated class may be
              passed.

        :param convert_unicode: Enable unicode-aware bind parameter and
           result-set processing for this Enum's data under Python 2 only.
           Under Python 2, this is set automatically based on the presence of
           unicode label strings.  This flag will be removed in SQLAlchemy
           2.0.

        :param create_constraint: defaults to False.  When creating a
           non-native enumerated type, also build a CHECK constraint on the
           database against the valid values.

           .. note:: it is strongly recommended that the CHECK constraint
              have an explicit name in order to support schema-management
              concerns.  This can be established either by setting the
              :paramref:`.Enum.name` parameter or by setting up an
              appropriate naming convention; see
              :ref:`constraint_naming_conventions` for background.

           .. versionchanged:: 1.4 - this flag now defaults to False, meaning
              no CHECK constraint is generated for a non-native enumerated
              type.

        :param metadata: Associate this type directly with a ``MetaData``
           object. For types that exist on the target database as an
           independent schema construct (PostgreSQL), this type will be
           created and dropped within ``create_all()`` and ``drop_all()``
           operations. If the type is not associated with any ``MetaData``
           object, it will associate itself with each ``Table`` in which it is
           used, and will be created when any of those individual tables are
           created, after a check is performed for its existence. The type is
           only dropped when ``drop_all()`` is called for that ``Table``
           object's metadata, however.

           The value of the :paramref:`_schema.MetaData.schema` parameter of
           the :class:`_schema.MetaData` object, if set, will be used as the
           default value of the :paramref:`_types.Enum.schema` on this object
           if an explicit value is not otherwise supplied.

           .. versionchanged:: 1.4.12 :class:`_types.Enum` inherits the
              :paramref:`_schema.MetaData.schema` parameter of the
              :class:`_schema.MetaData` object if present, when passed using
              the :paramref:`_types.Enum.metadata` parameter.

        :param name: The name of this type. This is required for PostgreSQL
           and any future supported database which requires an explicitly
           named type, or an explicitly named constraint in order to generate
           the type and/or a table that uses it. If a PEP-435 enumerated
           class was used, its name (converted to lower case) is used by
           default.

        :param native_enum: Use the database's native ENUM type when
           available. Defaults to True. When False, uses VARCHAR + check
           constraint for all backends. The VARCHAR length can be controlled
           with :paramref:`.Enum.length`

        :param length: Allows specifying a custom length for the VARCHAR
           when :paramref:`.Enum.native_enum` is False. By default it uses the
           length of the longest value.

           .. versionadded:: 1.3.16

        :param schema: Schema name of this type. For types that exist on the
           target database as an independent schema construct (PostgreSQL),
           this parameter specifies the named schema in which the type is
           present.

           If not present, the schema name will be taken from the
           :class:`_schema.MetaData` collection if passed as
           :paramref:`_types.Enum.metadata`, for a :class:`_schema.MetaData`
           that includes the :paramref:`_schema.MetaData.schema` parameter.

           .. versionchanged:: 1.4.12 :class:`_types.Enum` inherits the
              :paramref:`_schema.MetaData.schema` parameter of the
              :class:`_schema.MetaData` object if present, when passed using
              the :paramref:`_types.Enum.metadata` parameter.

           Otherwise, if the :paramref:`_types.Enum.inherit_schema` flag is
           set to ``True``, the schema will be inherited from the associated
           :class:`_schema.Table` object if any; when
           :paramref:`_types.Enum.inherit_schema` is at its default of
           ``False``, the owning table's schema is **not** used.

        :param quote: Set explicit quoting preferences for the type's name.

        :param inherit_schema: When ``True``, the "schema" from the owning
           :class:`_schema.Table`
           will be copied to the "schema" attribute of this
           :class:`.Enum`, replacing whatever value was passed for the
           ``schema`` attribute.   This also takes effect when using the
           :meth:`_schema.Table.to_metadata` operation.

        :param validate_strings: when True, string values that are being
           passed to the database in a SQL statement will be checked
           for validity against the list of enumerated values.  Unrecognized
           values will result in a ``LookupError`` being raised.

           .. versionadded:: 1.1.0b2

        :param values_callable: A callable which will be passed the PEP-435
           compliant enumerated type, which should then return a list of
           string values to be persisted.  This allows for alternate usages
           such as using the string value of an enum to be persisted to the
           database instead of its name.

           .. versionadded:: 1.2.3

        :param sort_key_function: a Python callable which may be used as the
           "key" argument in the Python ``sorted()`` built-in.   The SQLAlchemy
           ORM requires that primary key columns which are mapped must
           be sortable in some way.  When using an unsortable enumeration
           object such as a Python 3 ``Enum`` object, this parameter may be
           used to set a default sort key function for the objects.  By
           default, the database value of the enumeration is used as the
           sorting function.

           .. versionadded:: 1.3.8

        :param omit_aliases: A boolean that when true will remove aliases from
           pep 435 enums. For backward compatibility it defaults to ``False``.
           A deprecation warning is raised if the enum has aliases and this
           flag was not set.

           .. versionadded:: 1.4.5

           .. deprecated:: 1.4 The default will be changed to ``True`` in
              SQLAlchemy 2.0.

        """
        self._enum_init(enums, kw)

    @property
    def _enums_argument(self):
        # the original constructor argument: the enum class itself when
        # one was given, else the list of string labels
        if self.enum_class is not None:
            return [self.enum_class]
        else:
            return self.enums

    def _enum_init(self, enums, kw):
        """internal init for :class:`.Enum` and subclasses.

        friendly init helper used by subclasses to remove
        all the Enum-specific keyword arguments from kw.  Allows all
        other arguments in kw to pass through.

        """
        self.native_enum = kw.pop("native_enum", True)
        self.create_constraint = kw.pop("create_constraint", False)
        self.values_callable = kw.pop("values_callable", None)
        self._sort_key_function = kw.pop("sort_key_function", NO_ARG)
        length_arg = kw.pop("length", NO_ARG)
        self._omit_aliases = kw.pop("omit_aliases", NO_ARG)
        values, objects = self._parse_into_values(enums, kw)
        self._setup_for_values(values, objects, kw)

        convert_unicode = kw.pop("convert_unicode", None)
        self.validate_strings = kw.pop("validate_strings", False)

        if convert_unicode is None:
            for e in self.enums:
                # this is all py2k logic that can go away for py3k only,
                # "expect unicode" will always be implicitly true
                if isinstance(e, util.text_type):
                    _expect_unicode = True
                    break
            else:
                _expect_unicode = False
        else:
            _expect_unicode = convert_unicode

        # VARCHAR length defaults to the longest label
        if self.enums:
            length = max(len(x) for x in self.enums)
        else:
            length = 0
        # an explicit length only applies to the non-native (VARCHAR)
        # form and must accommodate the longest label
        if not self.native_enum and length_arg is not NO_ARG:
            if length_arg < length:
                raise ValueError(
                    "When provided, length must be larger or equal"
                    " than the length of the longest enum value. %s < %s"
                    % (length_arg, length)
                )
            length = length_arg

        # None round-trips as None in both directions
        self._valid_lookup[None] = self._object_lookup[None] = None

        super(Enum, self).__init__(
            length=length, _expect_unicode=_expect_unicode
        )

        if self.enum_class:
            kw.setdefault("name", self.enum_class.__name__.lower())
        SchemaType.__init__(
            self,
            name=kw.pop("name", None),
            schema=kw.pop("schema", None),
            metadata=kw.pop("metadata", None),
            inherit_schema=kw.pop("inherit_schema", False),
            quote=kw.pop("quote", None),
            _create_events=kw.pop("_create_events", True),
        )

    def _parse_into_values(self, enums, kw):
        # returns (values, objects): the string values persisted to the
        # database and the Python objects handed back to the application;
        # for plain string enums the two sequences are the same
        if not enums and "_enums" in kw:
            enums = kw.pop("_enums")

        if len(enums) == 1 and hasattr(enums[0], "__members__"):
            self.enum_class = enums[0]

            _members = self.enum_class.__members__

            # alias: a __members__ key that differs from its member's name
            aliases = [n for n, v in _members.items() if v.name != n]
            if self._omit_aliases is NO_ARG and aliases:
                util.warn_deprecated_20(
                    "The provided enum %s contains the aliases %s. The "
                    "``omit_aliases`` will default to ``True`` in SQLAlchemy "
                    "2.0. Specify a value to silence this warning."
                    % (self.enum_class.__name__, aliases)
                )
            if self._omit_aliases is True:
                # remove aliases
                members = OrderedDict(
                    (n, v) for n, v in _members.items() if v.name == n
                )
            else:
                members = _members
            if self.values_callable:
                values = self.values_callable(self.enum_class)
            else:
                values = list(members)

            objects = [members[k] for k in members]
            return values, objects
        else:
            self.enum_class = None
            return enums, enums

    def _setup_for_values(self, values, objects, kw):
        """Build the bidirectional lookup dicts between database string
        values and Python-level objects."""
        self.enums = list(values)

        # zip over reversed() so that when the same object appears more
        # than once, the first occurrence's value wins in the lookup
        self._valid_lookup = dict(zip(reversed(objects), reversed(values)))

        self._object_lookup = dict(zip(values, objects))

        # also allow looking up a database value by the value itself
        self._valid_lookup.update(
            [
                (value, self._valid_lookup[self._object_lookup[value]])
                for value in values
            ]
        )

    @property
    def sort_key_function(self):
        # default to sorting by the database value of each element
        if self._sort_key_function is NO_ARG:
            return self._db_value_for_elem
        else:
            return self._sort_key_function

    @property
    def native(self):
        # used by the Emulated machinery to select native vs. emulated
        return self.native_enum

    def _db_value_for_elem(self, elem):
        """Convert a Python-level element to its database string value,
        raising ``LookupError`` for unknown non-string elements."""
        try:
            return self._valid_lookup[elem]
        except KeyError as err:
            # for unknown string values, we return as is.  While we can
            # validate these if we wanted, that does not allow for lesser-used
            # end-user use cases, such as using a LIKE comparison with an enum,
            # or for an application that wishes to apply string tests to an
            # ENUM (see [ticket:3725]).  While we can decide to differentiate
            # here between an INSERT statement and a criteria used in a SELECT,
            # for now we're staying conservative w/ behavioral changes (perhaps
            # someone has a trigger that handles strings on INSERT)
            if not self.validate_strings and isinstance(
                elem, compat.string_types
            ):
                return elem
            else:
                util.raise_(
                    LookupError(
                        "'%s' is not among the defined enum values. "
                        "Enum name: %s. Possible values: %s"
                        % (
                            elem,
                            self.name,
                            langhelpers.repr_tuple_names(self.enums),
                        )
                    ),
                    replace_context=err,
                )

    class Comparator(String.Comparator):
        def _adapt_expression(self, op, other_comparator):
            op, typ = super(Enum.Comparator, self)._adapt_expression(
                op, other_comparator
            )
            if op is operators.concat_op:
                # concatenating an enum yields a plain string type
                typ = String(
                    self.type.length, _expect_unicode=self.type._expect_unicode
                )
            return op, typ

    comparator_factory = Comparator

    def _object_value_for_elem(self, elem):
        """Convert a database string value to its Python-level object,
        raising ``LookupError`` when the value is not recognized."""
        try:
            return self._object_lookup[elem]
        except KeyError as err:
            util.raise_(
                LookupError(
                    "'%s' is not among the defined enum values. "
                    "Enum name: %s. Possible values: %s"
                    % (
                        elem,
                        self.name,
                        langhelpers.repr_tuple_names(self.enums),
                    )
                ),
                replace_context=err,
            )

    def __repr__(self):
        return util.generic_repr(
            self,
            additional_kw=[("native_enum", True)],
            to_inspect=[Enum, SchemaType],
        )

    def as_generic(self, allow_nulltype=False):
        if hasattr(self, "enums"):
            args = self.enums
        else:
            raise NotImplementedError(
                "TypeEngine.as_generic() heuristic "
                "is undefined for types that inherit Enum but do not have "
                "an `enums` attribute."
            )

        return util.constructor_copy(self, self._generic_type_affinity, *args)

    def adapt_to_emulated(self, impltype, **kw):
        # carry over all Enum-level configuration to the emulated type;
        # DDL events are not re-established on the adapted copy
        kw.setdefault("_expect_unicode", self._expect_unicode)
        kw.setdefault("validate_strings", self.validate_strings)
        kw.setdefault("name", self.name)
        kw.setdefault("schema", self.schema)
        kw.setdefault("inherit_schema", self.inherit_schema)
        kw.setdefault("metadata", self.metadata)
        kw.setdefault("_create_events", False)
        kw.setdefault("native_enum", self.native_enum)
        kw.setdefault("values_callable", self.values_callable)
        kw.setdefault("create_constraint", self.create_constraint)
        kw.setdefault("length", self.length)
        kw.setdefault("omit_aliases", self._omit_aliases)
        assert "_enums" in kw
        return impltype(**kw)

    def adapt(self, impltype, **kw):
        kw["_enums"] = self._enums_argument
        return super(Enum, self).adapt(impltype, **kw)

    def _should_create_constraint(self, compiler, **kw):
        # a CHECK constraint applies only to the non-native (VARCHAR) form
        if not self._is_impl_for_variant(compiler.dialect, kw):
            return False
        return (
            not self.native_enum or not compiler.dialect.supports_native_enum
        )

    @util.preload_module("sqlalchemy.sql.schema")
    def _set_table(self, column, table):
        schema = util.preloaded.sql_schema
        SchemaType._set_table(self, column, table)

        if not self.create_constraint:
            return

        variant_mapping = self._variant_mapping_for_set_table(column)

        # the constraint attaches itself to the table as a side effect
        # of construction
        e = schema.CheckConstraint(
            type_coerce(column, self).in_(self.enums),
            name=_NONE_NAME if self.name is None else self.name,
            _create_rule=util.portable_instancemethod(
                self._should_create_constraint,
                {"variant_mapping": variant_mapping},
            ),
            _type_bound=True,
        )
        assert e.table is table

    def literal_processor(self, dialect):
        parent_processor = super(Enum, self).literal_processor(dialect)

        def process(value):
            # validate / translate to db value, then apply String's
            # literal rendering
            value = self._db_value_for_elem(value)
            if parent_processor:
                value = parent_processor(value)
            return value

        return process

    def bind_processor(self, dialect):
        def process(value):
            value = self._db_value_for_elem(value)
            if parent_processor:
                value = parent_processor(value)
            return value

        # bound after the closure is defined, but before it can run
        parent_processor = super(Enum, self).bind_processor(dialect)

        return process

    def result_processor(self, dialect, coltype):
        parent_processor = super(Enum, self).result_processor(dialect, coltype)

        def process(value):
            # apply String's result processing first, then translate the
            # db value back to the Python-level object
            if parent_processor:
                value = parent_processor(value)

            value = self._object_value_for_elem(value)
            return value

        return process

    def copy(self, **kw):
        return SchemaType.copy(self, **kw)

    @property
    def python_type(self):
        if self.enum_class:
            return self.enum_class
        else:
            return super(Enum, self).python_type
  1387. class PickleType(TypeDecorator):
  1388. """Holds Python objects, which are serialized using pickle.
  1389. PickleType builds upon the Binary type to apply Python's
  1390. ``pickle.dumps()`` to incoming objects, and ``pickle.loads()`` on
  1391. the way out, allowing any pickleable Python object to be stored as
  1392. a serialized binary field.
  1393. To allow ORM change events to propagate for elements associated
  1394. with :class:`.PickleType`, see :ref:`mutable_toplevel`.
  1395. """
  1396. impl = LargeBinary
  1397. cache_ok = True
  1398. def __init__(
  1399. self,
  1400. protocol=pickle.HIGHEST_PROTOCOL,
  1401. pickler=None,
  1402. comparator=None,
  1403. impl=None,
  1404. ):
  1405. """
  1406. Construct a PickleType.
  1407. :param protocol: defaults to ``pickle.HIGHEST_PROTOCOL``.
  1408. :param pickler: defaults to cPickle.pickle or pickle.pickle if
  1409. cPickle is not available. May be any object with
  1410. pickle-compatible ``dumps`` and ``loads`` methods.
  1411. :param comparator: a 2-arg callable predicate used
  1412. to compare values of this type. If left as ``None``,
  1413. the Python "equals" operator is used to compare values.
  1414. :param impl: A binary-storing :class:`_types.TypeEngine` class or
  1415. instance to use in place of the default :class:`_types.LargeBinary`.
  1416. For example the :class: `_mysql.LONGBLOB` class may be more effective
  1417. when using MySQL.
  1418. .. versionadded:: 1.4.20
  1419. """
  1420. self.protocol = protocol
  1421. self.pickler = pickler or pickle
  1422. self.comparator = comparator
  1423. super(PickleType, self).__init__()
  1424. if impl:
  1425. self.impl = to_instance(impl)
  1426. def __reduce__(self):
  1427. return PickleType, (self.protocol, None, self.comparator)
  1428. def bind_processor(self, dialect):
  1429. impl_processor = self.impl.bind_processor(dialect)
  1430. dumps = self.pickler.dumps
  1431. protocol = self.protocol
  1432. if impl_processor:
  1433. def process(value):
  1434. if value is not None:
  1435. value = dumps(value, protocol)
  1436. return impl_processor(value)
  1437. else:
  1438. def process(value):
  1439. if value is not None:
  1440. value = dumps(value, protocol)
  1441. return value
  1442. return process
  1443. def result_processor(self, dialect, coltype):
  1444. impl_processor = self.impl.result_processor(dialect, coltype)
  1445. loads = self.pickler.loads
  1446. if impl_processor:
  1447. def process(value):
  1448. value = impl_processor(value)
  1449. if value is None:
  1450. return None
  1451. return loads(value)
  1452. else:
  1453. def process(value):
  1454. if value is None:
  1455. return None
  1456. return loads(value)
  1457. return process
  1458. def compare_values(self, x, y):
  1459. if self.comparator:
  1460. return self.comparator(x, y)
  1461. else:
  1462. return x == y
  1463. class Boolean(Emulated, TypeEngine, SchemaType):
  1464. """A bool datatype.
  1465. :class:`.Boolean` typically uses BOOLEAN or SMALLINT on the DDL side,
  1466. and on the Python side deals in ``True`` or ``False``.
  1467. The :class:`.Boolean` datatype currently has two levels of assertion
  1468. that the values persisted are simple true/false values. For all
  1469. backends, only the Python values ``None``, ``True``, ``False``, ``1``
  1470. or ``0`` are accepted as parameter values. For those backends that
  1471. don't support a "native boolean" datatype, an option exists to
  1472. also create a CHECK constraint on the target column
  1473. .. versionchanged:: 1.2 the :class:`.Boolean` datatype now asserts that
  1474. incoming Python values are already in pure boolean form.
  1475. """
  1476. __visit_name__ = "boolean"
  1477. native = True
  1478. def __init__(
  1479. self, create_constraint=False, name=None, _create_events=True
  1480. ):
  1481. """Construct a Boolean.
  1482. :param create_constraint: defaults to False. If the boolean
  1483. is generated as an int/smallint, also create a CHECK constraint
  1484. on the table that ensures 1 or 0 as a value.
  1485. .. note:: it is strongly recommended that the CHECK constraint
  1486. have an explicit name in order to support schema-management
  1487. concerns. This can be established either by setting the
  1488. :paramref:`.Boolean.name` parameter or by setting up an
  1489. appropriate naming convention; see
  1490. :ref:`constraint_naming_conventions` for background.
  1491. .. versionchanged:: 1.4 - this flag now defaults to False, meaning
  1492. no CHECK constraint is generated for a non-native enumerated
  1493. type.
  1494. :param name: if a CHECK constraint is generated, specify
  1495. the name of the constraint.
  1496. """
  1497. self.create_constraint = create_constraint
  1498. self.name = name
  1499. self._create_events = _create_events
  1500. def _should_create_constraint(self, compiler, **kw):
  1501. if not self._is_impl_for_variant(compiler.dialect, kw):
  1502. return False
  1503. return (
  1504. not compiler.dialect.supports_native_boolean
  1505. and compiler.dialect.non_native_boolean_check_constraint
  1506. )
  1507. @util.preload_module("sqlalchemy.sql.schema")
  1508. def _set_table(self, column, table):
  1509. schema = util.preloaded.sql_schema
  1510. if not self.create_constraint:
  1511. return
  1512. variant_mapping = self._variant_mapping_for_set_table(column)
  1513. e = schema.CheckConstraint(
  1514. type_coerce(column, self).in_([0, 1]),
  1515. name=_NONE_NAME if self.name is None else self.name,
  1516. _create_rule=util.portable_instancemethod(
  1517. self._should_create_constraint,
  1518. {"variant_mapping": variant_mapping},
  1519. ),
  1520. _type_bound=True,
  1521. )
  1522. assert e.table is table
  1523. @property
  1524. def python_type(self):
  1525. return bool
  1526. _strict_bools = frozenset([None, True, False])
  1527. def _strict_as_bool(self, value):
  1528. if value not in self._strict_bools:
  1529. if not isinstance(value, int):
  1530. raise TypeError("Not a boolean value: %r" % value)
  1531. else:
  1532. raise ValueError(
  1533. "Value %r is not None, True, or False" % value
  1534. )
  1535. return value
  1536. def literal_processor(self, dialect):
  1537. compiler = dialect.statement_compiler(dialect, None)
  1538. true = compiler.visit_true(None)
  1539. false = compiler.visit_false(None)
  1540. def process(value):
  1541. return true if self._strict_as_bool(value) else false
  1542. return process
  1543. def bind_processor(self, dialect):
  1544. _strict_as_bool = self._strict_as_bool
  1545. if dialect.supports_native_boolean:
  1546. _coerce = bool
  1547. else:
  1548. _coerce = int
  1549. def process(value):
  1550. value = _strict_as_bool(value)
  1551. if value is not None:
  1552. value = _coerce(value)
  1553. return value
  1554. return process
  1555. def result_processor(self, dialect, coltype):
  1556. if dialect.supports_native_boolean:
  1557. return None
  1558. else:
  1559. return processors.int_to_boolean
  1560. class _AbstractInterval(_LookupExpressionAdapter, TypeEngine):
  1561. @util.memoized_property
  1562. def _expression_adaptations(self):
  1563. # Based on http://www.postgresql.org/docs/current/\
  1564. # static/functions-datetime.html.
  1565. return {
  1566. operators.add: {
  1567. Date: DateTime,
  1568. Interval: self.__class__,
  1569. DateTime: DateTime,
  1570. Time: Time,
  1571. },
  1572. operators.sub: {Interval: self.__class__},
  1573. operators.mul: {Numeric: self.__class__},
  1574. operators.truediv: {Numeric: self.__class__},
  1575. operators.div: {Numeric: self.__class__},
  1576. }
  1577. @property
  1578. def _type_affinity(self):
  1579. return Interval
  1580. def coerce_compared_value(self, op, value):
  1581. """See :meth:`.TypeEngine.coerce_compared_value` for a description."""
  1582. return self.impl.coerce_compared_value(op, value)
  1583. class Interval(Emulated, _AbstractInterval, TypeDecorator):
  1584. """A type for ``datetime.timedelta()`` objects.
  1585. The Interval type deals with ``datetime.timedelta`` objects. In
  1586. PostgreSQL, the native ``INTERVAL`` type is used; for others, the
  1587. value is stored as a date which is relative to the "epoch"
  1588. (Jan. 1, 1970).
  1589. Note that the ``Interval`` type does not currently provide date arithmetic
  1590. operations on platforms which do not support interval types natively. Such
  1591. operations usually require transformation of both sides of the expression
  1592. (such as, conversion of both sides into integer epoch values first) which
  1593. currently is a manual procedure (such as via
  1594. :attr:`~sqlalchemy.sql.expression.func`).
  1595. """
  1596. impl = DateTime
  1597. epoch = dt.datetime.utcfromtimestamp(0)
  1598. cache_ok = True
  1599. def __init__(self, native=True, second_precision=None, day_precision=None):
  1600. """Construct an Interval object.
  1601. :param native: when True, use the actual
  1602. INTERVAL type provided by the database, if
  1603. supported (currently PostgreSQL, Oracle).
  1604. Otherwise, represent the interval data as
  1605. an epoch value regardless.
  1606. :param second_precision: For native interval types
  1607. which support a "fractional seconds precision" parameter,
  1608. i.e. Oracle and PostgreSQL
  1609. :param day_precision: for native interval types which
  1610. support a "day precision" parameter, i.e. Oracle.
  1611. """
  1612. super(Interval, self).__init__()
  1613. self.native = native
  1614. self.second_precision = second_precision
  1615. self.day_precision = day_precision
  1616. @property
  1617. def python_type(self):
  1618. return dt.timedelta
  1619. def adapt_to_emulated(self, impltype, **kw):
  1620. return _AbstractInterval.adapt(self, impltype, **kw)
  1621. def bind_processor(self, dialect):
  1622. impl_processor = self.impl.bind_processor(dialect)
  1623. epoch = self.epoch
  1624. if impl_processor:
  1625. def process(value):
  1626. if value is not None:
  1627. value = epoch + value
  1628. return impl_processor(value)
  1629. else:
  1630. def process(value):
  1631. if value is not None:
  1632. value = epoch + value
  1633. return value
  1634. return process
  1635. def result_processor(self, dialect, coltype):
  1636. impl_processor = self.impl.result_processor(dialect, coltype)
  1637. epoch = self.epoch
  1638. if impl_processor:
  1639. def process(value):
  1640. value = impl_processor(value)
  1641. if value is None:
  1642. return None
  1643. return value - epoch
  1644. else:
  1645. def process(value):
  1646. if value is None:
  1647. return None
  1648. return value - epoch
  1649. return process
  1650. class JSON(Indexable, TypeEngine):
  1651. """Represent a SQL JSON type.
  1652. .. note:: :class:`_types.JSON`
  1653. is provided as a facade for vendor-specific
  1654. JSON types. Since it supports JSON SQL operations, it only
  1655. works on backends that have an actual JSON type, currently:
  1656. * PostgreSQL - see :class:`sqlalchemy.dialects.postgresql.JSON` and
  1657. :class:`sqlalchemy.dialects.postgresql.JSONB` for backend-specific
  1658. notes
  1659. * MySQL - see
  1660. :class:`sqlalchemy.dialects.mysql.JSON` for backend-specific notes
  1661. * SQLite as of version 3.9 - see
  1662. :class:`sqlalchemy.dialects.sqlite.JSON` for backend-specific notes
  1663. * Microsoft SQL Server 2016 and later - see
  1664. :class:`sqlalchemy.dialects.mssql.JSON` for backend-specific notes
  1665. :class:`_types.JSON` is part of the Core in support of the growing
  1666. popularity of native JSON datatypes.
  1667. The :class:`_types.JSON` type stores arbitrary JSON format data, e.g.::
  1668. data_table = Table('data_table', metadata,
  1669. Column('id', Integer, primary_key=True),
  1670. Column('data', JSON)
  1671. )
  1672. with engine.connect() as conn:
  1673. conn.execute(
  1674. data_table.insert(),
  1675. data = {"key1": "value1", "key2": "value2"}
  1676. )
  1677. **JSON-Specific Expression Operators**
  1678. The :class:`_types.JSON`
  1679. datatype provides these additional SQL operations:
  1680. * Keyed index operations::
  1681. data_table.c.data['some key']
  1682. * Integer index operations::
  1683. data_table.c.data[3]
  1684. * Path index operations::
  1685. data_table.c.data[('key_1', 'key_2', 5, ..., 'key_n')]
  1686. * Data casters for specific JSON element types, subsequent to an index
  1687. or path operation being invoked::
  1688. data_table.c.data["some key"].as_integer()
  1689. .. versionadded:: 1.3.11
  1690. Additional operations may be available from the dialect-specific versions
  1691. of :class:`_types.JSON`, such as
  1692. :class:`sqlalchemy.dialects.postgresql.JSON` and
  1693. :class:`sqlalchemy.dialects.postgresql.JSONB` which both offer additional
  1694. PostgreSQL-specific operations.
  1695. **Casting JSON Elements to Other Types**
  1696. Index operations, i.e. those invoked by calling upon the expression using
  1697. the Python bracket operator as in ``some_column['some key']``, return an
  1698. expression object whose type defaults to :class:`_types.JSON` by default,
  1699. so that
  1700. further JSON-oriented instructions may be called upon the result type.
  1701. However, it is likely more common that an index operation is expected
  1702. to return a specific scalar element, such as a string or integer. In
  1703. order to provide access to these elements in a backend-agnostic way,
  1704. a series of data casters are provided:
  1705. * :meth:`.JSON.Comparator.as_string` - return the element as a string
  1706. * :meth:`.JSON.Comparator.as_boolean` - return the element as a boolean
  1707. * :meth:`.JSON.Comparator.as_float` - return the element as a float
  1708. * :meth:`.JSON.Comparator.as_integer` - return the element as an integer
  1709. These data casters are implemented by supporting dialects in order to
  1710. assure that comparisons to the above types will work as expected, such as::
  1711. # integer comparison
  1712. data_table.c.data["some_integer_key"].as_integer() == 5
  1713. # boolean comparison
  1714. data_table.c.data["some_boolean"].as_boolean() == True
  1715. .. versionadded:: 1.3.11 Added type-specific casters for the basic JSON
  1716. data element types.
  1717. .. note::
  1718. The data caster functions are new in version 1.3.11, and supersede
  1719. the previous documented approaches of using CAST; for reference,
  1720. this looked like::
  1721. from sqlalchemy import cast, type_coerce
  1722. from sqlalchemy import String, JSON
  1723. cast(
  1724. data_table.c.data['some_key'], String
  1725. ) == type_coerce(55, JSON)
  1726. The above case now works directly as::
  1727. data_table.c.data['some_key'].as_integer() == 5
  1728. For details on the previous comparison approach within the 1.3.x
  1729. series, see the documentation for SQLAlchemy 1.2 or the included HTML
  1730. files in the doc/ directory of the version's distribution.
  1731. **Detecting Changes in JSON columns when using the ORM**
  1732. The :class:`_types.JSON` type, when used with the SQLAlchemy ORM, does not
  1733. detect in-place mutations to the structure. In order to detect these, the
  1734. :mod:`sqlalchemy.ext.mutable` extension must be used. This extension will
  1735. allow "in-place" changes to the datastructure to produce events which
  1736. will be detected by the unit of work. See the example at :class:`.HSTORE`
  1737. for a simple example involving a dictionary.
  1738. **Support for JSON null vs. SQL NULL**
  1739. When working with NULL values, the :class:`_types.JSON`
  1740. type recommends the
  1741. use of two specific constants in order to differentiate between a column
  1742. that evaluates to SQL NULL, e.g. no value, vs. the JSON-encoded string
  1743. of ``"null"``. To insert or select against a value that is SQL NULL,
  1744. use the constant :func:`.null`::
  1745. from sqlalchemy import null
  1746. conn.execute(table.insert(), json_value=null())
  1747. To insert or select against a value that is JSON ``"null"``, use the
  1748. constant :attr:`_types.JSON.NULL`::
  1749. conn.execute(table.insert(), json_value=JSON.NULL)
  1750. The :class:`_types.JSON` type supports a flag
  1751. :paramref:`_types.JSON.none_as_null` which when set to True will result
  1752. in the Python constant ``None`` evaluating to the value of SQL
  1753. NULL, and when set to False results in the Python constant
  1754. ``None`` evaluating to the value of JSON ``"null"``. The Python
  1755. value ``None`` may be used in conjunction with either
  1756. :attr:`_types.JSON.NULL` and :func:`.null` in order to indicate NULL
  1757. values, but care must be taken as to the value of the
  1758. :paramref:`_types.JSON.none_as_null` in these cases.
  1759. **Customizing the JSON Serializer**
  1760. The JSON serializer and deserializer used by :class:`_types.JSON`
  1761. defaults to
  1762. Python's ``json.dumps`` and ``json.loads`` functions; in the case of the
  1763. psycopg2 dialect, psycopg2 may be using its own custom loader function.
  1764. In order to affect the serializer / deserializer, they are currently
  1765. configurable at the :func:`_sa.create_engine` level via the
  1766. :paramref:`_sa.create_engine.json_serializer` and
  1767. :paramref:`_sa.create_engine.json_deserializer` parameters. For example,
  1768. to turn off ``ensure_ascii``::
  1769. engine = create_engine(
  1770. "sqlite://",
  1771. json_serializer=lambda obj: json.dumps(obj, ensure_ascii=False))
  1772. .. versionchanged:: 1.3.7
  1773. SQLite dialect's ``json_serializer`` and ``json_deserializer``
  1774. parameters renamed from ``_json_serializer`` and
  1775. ``_json_deserializer``.
  1776. .. seealso::
  1777. :class:`sqlalchemy.dialects.postgresql.JSON`
  1778. :class:`sqlalchemy.dialects.postgresql.JSONB`
  1779. :class:`sqlalchemy.dialects.mysql.JSON`
  1780. :class:`sqlalchemy.dialects.sqlite.JSON`
  1781. .. versionadded:: 1.1
  1782. """
  1783. __visit_name__ = "JSON"
  1784. hashable = False
  1785. NULL = util.symbol("JSON_NULL")
  1786. """Describe the json value of NULL.
  1787. This value is used to force the JSON value of ``"null"`` to be
  1788. used as the value. A value of Python ``None`` will be recognized
  1789. either as SQL NULL or JSON ``"null"``, based on the setting
  1790. of the :paramref:`_types.JSON.none_as_null` flag; the
  1791. :attr:`_types.JSON.NULL`
  1792. constant can be used to always resolve to JSON ``"null"`` regardless
  1793. of this setting. This is in contrast to the :func:`_expression.null`
  1794. construct,
  1795. which always resolves to SQL NULL. E.g.::
  1796. from sqlalchemy import null
  1797. from sqlalchemy.dialects.postgresql import JSON
  1798. # will *always* insert SQL NULL
  1799. obj1 = MyObject(json_value=null())
  1800. # will *always* insert JSON string "null"
  1801. obj2 = MyObject(json_value=JSON.NULL)
  1802. session.add_all([obj1, obj2])
  1803. session.commit()
  1804. In order to set JSON NULL as a default value for a column, the most
  1805. transparent method is to use :func:`_expression.text`::
  1806. Table(
  1807. 'my_table', metadata,
  1808. Column('json_data', JSON, default=text("'null'"))
  1809. )
  1810. While it is possible to use :attr:`_types.JSON.NULL` in this context, the
  1811. :attr:`_types.JSON.NULL` value will be returned as the value of the
  1812. column,
  1813. which in the context of the ORM or other repurposing of the default
  1814. value, may not be desirable. Using a SQL expression means the value
  1815. will be re-fetched from the database within the context of retrieving
  1816. generated defaults.
  1817. """
  1818. def __init__(self, none_as_null=False):
  1819. """Construct a :class:`_types.JSON` type.
  1820. :param none_as_null=False: if True, persist the value ``None`` as a
  1821. SQL NULL value, not the JSON encoding of ``null``. Note that
  1822. when this flag is False, the :func:`.null` construct can still
  1823. be used to persist a NULL value::
  1824. from sqlalchemy import null
  1825. conn.execute(table.insert(), data=null())
  1826. .. note::
  1827. :paramref:`_types.JSON.none_as_null` does **not** apply to the
  1828. values passed to :paramref:`_schema.Column.default` and
  1829. :paramref:`_schema.Column.server_default`; a value of ``None``
  1830. passed for these parameters means "no default present".
  1831. .. seealso::
  1832. :attr:`.types.JSON.NULL`
  1833. """
  1834. self.none_as_null = none_as_null
  1835. class JSONElementType(TypeEngine):
  1836. """Common function for index / path elements in a JSON expression."""
  1837. _integer = Integer()
  1838. _string = String()
  1839. def string_bind_processor(self, dialect):
  1840. return self._string._cached_bind_processor(dialect)
  1841. def string_literal_processor(self, dialect):
  1842. return self._string._cached_literal_processor(dialect)
  1843. def bind_processor(self, dialect):
  1844. int_processor = self._integer._cached_bind_processor(dialect)
  1845. string_processor = self.string_bind_processor(dialect)
  1846. def process(value):
  1847. if int_processor and isinstance(value, int):
  1848. value = int_processor(value)
  1849. elif string_processor and isinstance(value, util.string_types):
  1850. value = string_processor(value)
  1851. return value
  1852. return process
  1853. def literal_processor(self, dialect):
  1854. int_processor = self._integer._cached_literal_processor(dialect)
  1855. string_processor = self.string_literal_processor(dialect)
  1856. def process(value):
  1857. if int_processor and isinstance(value, int):
  1858. value = int_processor(value)
  1859. elif string_processor and isinstance(value, util.string_types):
  1860. value = string_processor(value)
  1861. return value
  1862. return process
  1863. class JSONIndexType(JSONElementType):
  1864. """Placeholder for the datatype of a JSON index value.
  1865. This allows execution-time processing of JSON index values
  1866. for special syntaxes.
  1867. """
  1868. class JSONIntIndexType(JSONIndexType):
  1869. """Placeholder for the datatype of a JSON index value.
  1870. This allows execution-time processing of JSON index values
  1871. for special syntaxes.
  1872. """
  1873. class JSONStrIndexType(JSONIndexType):
  1874. """Placeholder for the datatype of a JSON index value.
  1875. This allows execution-time processing of JSON index values
  1876. for special syntaxes.
  1877. """
  1878. class JSONPathType(JSONElementType):
  1879. """Placeholder type for JSON path operations.
  1880. This allows execution-time processing of a path-based
  1881. index value into a specific SQL syntax.
  1882. """
  1883. class Comparator(Indexable.Comparator, Concatenable.Comparator):
  1884. """Define comparison operations for :class:`_types.JSON`."""
  1885. def _setup_getitem(self, index):
  1886. if not isinstance(index, util.string_types) and isinstance(
  1887. index, compat.collections_abc.Sequence
  1888. ):
  1889. index = coercions.expect(
  1890. roles.BinaryElementRole,
  1891. index,
  1892. expr=self.expr,
  1893. operator=operators.json_path_getitem_op,
  1894. bindparam_type=JSON.JSONPathType,
  1895. )
  1896. operator = operators.json_path_getitem_op
  1897. else:
  1898. index = coercions.expect(
  1899. roles.BinaryElementRole,
  1900. index,
  1901. expr=self.expr,
  1902. operator=operators.json_getitem_op,
  1903. bindparam_type=JSON.JSONIntIndexType
  1904. if isinstance(index, int)
  1905. else JSON.JSONStrIndexType,
  1906. )
  1907. operator = operators.json_getitem_op
  1908. return operator, index, self.type
  1909. def as_boolean(self):
  1910. """Cast an indexed value as boolean.
  1911. e.g.::
  1912. stmt = select(
  1913. mytable.c.json_column['some_data'].as_boolean()
  1914. ).where(
  1915. mytable.c.json_column['some_data'].as_boolean() == True
  1916. )
  1917. .. versionadded:: 1.3.11
  1918. """
  1919. return self._binary_w_type(Boolean(), "as_boolean")
  1920. def as_string(self):
  1921. """Cast an indexed value as string.
  1922. e.g.::
  1923. stmt = select(
  1924. mytable.c.json_column['some_data'].as_string()
  1925. ).where(
  1926. mytable.c.json_column['some_data'].as_string() ==
  1927. 'some string'
  1928. )
  1929. .. versionadded:: 1.3.11
  1930. """
  1931. return self._binary_w_type(String(), "as_string")
  1932. def as_integer(self):
  1933. """Cast an indexed value as integer.
  1934. e.g.::
  1935. stmt = select(
  1936. mytable.c.json_column['some_data'].as_integer()
  1937. ).where(
  1938. mytable.c.json_column['some_data'].as_integer() == 5
  1939. )
  1940. .. versionadded:: 1.3.11
  1941. """
  1942. return self._binary_w_type(Integer(), "as_integer")
  1943. def as_float(self):
  1944. """Cast an indexed value as float.
  1945. e.g.::
  1946. stmt = select(
  1947. mytable.c.json_column['some_data'].as_float()
  1948. ).where(
  1949. mytable.c.json_column['some_data'].as_float() == 29.75
  1950. )
  1951. .. versionadded:: 1.3.11
  1952. """
  1953. return self._binary_w_type(Float(), "as_float")
  1954. def as_numeric(self, precision, scale, asdecimal=True):
  1955. """Cast an indexed value as numeric/decimal.
  1956. e.g.::
  1957. stmt = select(
  1958. mytable.c.json_column['some_data'].as_numeric(10, 6)
  1959. ).where(
  1960. mytable.c.
  1961. json_column['some_data'].as_numeric(10, 6) == 29.75
  1962. )
  1963. .. versionadded:: 1.4.0b2
  1964. """
  1965. return self._binary_w_type(
  1966. Numeric(precision, scale, asdecimal=asdecimal), "as_numeric"
  1967. )
  1968. def as_json(self):
  1969. """Cast an indexed value as JSON.
  1970. e.g.::
  1971. stmt = select(mytable.c.json_column['some_data'].as_json())
  1972. This is typically the default behavior of indexed elements in any
  1973. case.
  1974. Note that comparison of full JSON structures may not be
  1975. supported by all backends.
  1976. .. versionadded:: 1.3.11
  1977. """
  1978. return self.expr
  1979. def _binary_w_type(self, typ, method_name):
  1980. if not isinstance(
  1981. self.expr, elements.BinaryExpression
  1982. ) or self.expr.operator not in (
  1983. operators.json_getitem_op,
  1984. operators.json_path_getitem_op,
  1985. ):
  1986. raise exc.InvalidRequestError(
  1987. "The JSON cast operator JSON.%s() only works with a JSON "
  1988. "index expression e.g. col['q'].%s()"
  1989. % (method_name, method_name)
  1990. )
  1991. expr = self.expr._clone()
  1992. expr.type = typ
  1993. return expr
  1994. comparator_factory = Comparator
  1995. @property
  1996. def python_type(self):
  1997. return dict
  1998. @property
  1999. def should_evaluate_none(self):
  2000. """Alias of :attr:`_types.JSON.none_as_null`"""
  2001. return not self.none_as_null
  2002. @should_evaluate_none.setter
  2003. def should_evaluate_none(self, value):
  2004. self.none_as_null = not value
  2005. @util.memoized_property
  2006. def _str_impl(self):
  2007. return String(_expect_unicode=True)
  2008. def bind_processor(self, dialect):
  2009. string_process = self._str_impl.bind_processor(dialect)
  2010. json_serializer = dialect._json_serializer or json.dumps
  2011. def process(value):
  2012. if value is self.NULL:
  2013. value = None
  2014. elif isinstance(value, elements.Null) or (
  2015. value is None and self.none_as_null
  2016. ):
  2017. return None
  2018. serialized = json_serializer(value)
  2019. if string_process:
  2020. serialized = string_process(serialized)
  2021. return serialized
  2022. return process
  2023. def result_processor(self, dialect, coltype):
  2024. string_process = self._str_impl.result_processor(dialect, coltype)
  2025. json_deserializer = dialect._json_deserializer or json.loads
  2026. def process(value):
  2027. if value is None:
  2028. return None
  2029. if string_process:
  2030. value = string_process(value)
  2031. return json_deserializer(value)
  2032. return process
  2033. class ARRAY(SchemaEventTarget, Indexable, Concatenable, TypeEngine):
  2034. """Represent a SQL Array type.
  2035. .. note:: This type serves as the basis for all ARRAY operations.
  2036. However, currently **only the PostgreSQL backend has support for SQL
  2037. arrays in SQLAlchemy**. It is recommended to use the PostgreSQL-specific
  2038. :class:`sqlalchemy.dialects.postgresql.ARRAY` type directly when using
  2039. ARRAY types with PostgreSQL, as it provides additional operators
  2040. specific to that backend.
  2041. :class:`_types.ARRAY` is part of the Core in support of various SQL
  2042. standard functions such as :class:`_functions.array_agg`
  2043. which explicitly involve
  2044. arrays; however, with the exception of the PostgreSQL backend and possibly
  2045. some third-party dialects, no other SQLAlchemy built-in dialect has support
  2046. for this type.
  2047. An :class:`_types.ARRAY` type is constructed given the "type"
  2048. of element::
  2049. mytable = Table("mytable", metadata,
  2050. Column("data", ARRAY(Integer))
  2051. )
  2052. The above type represents an N-dimensional array,
  2053. meaning a supporting backend such as PostgreSQL will interpret values
  2054. with any number of dimensions automatically. To produce an INSERT
  2055. construct that passes in a 1-dimensional array of integers::
  2056. connection.execute(
  2057. mytable.insert(),
  2058. data=[1,2,3]
  2059. )
  2060. The :class:`_types.ARRAY` type can be constructed given a fixed number
  2061. of dimensions::
  2062. mytable = Table("mytable", metadata,
  2063. Column("data", ARRAY(Integer, dimensions=2))
  2064. )
  2065. Sending a number of dimensions is optional, but recommended if the
  2066. datatype is to represent arrays of more than one dimension. This number
  2067. is used:
  2068. * When emitting the type declaration itself to the database, e.g.
  2069. ``INTEGER[][]``
  2070. * When translating Python values to database values, and vice versa, e.g.
  2071. an ARRAY of :class:`.Unicode` objects uses this number to efficiently
  2072. access the string values inside of array structures without resorting
  2073. to per-row type inspection
  2074. * When used with the Python ``getitem`` accessor, the number of dimensions
  2075. serves to define the kind of type that the ``[]`` operator should
  2076. return, e.g. for an ARRAY of INTEGER with two dimensions::
  2077. >>> expr = table.c.column[5] # returns ARRAY(Integer, dimensions=1)
  2078. >>> expr = expr[6] # returns Integer
  2079. For 1-dimensional arrays, an :class:`_types.ARRAY` instance with no
  2080. dimension parameter will generally assume single-dimensional behaviors.
  2081. SQL expressions of type :class:`_types.ARRAY` have support for "index" and
  2082. "slice" behavior. The Python ``[]`` operator works normally here, given
  2083. integer indexes or slices. Arrays default to 1-based indexing.
  2084. The operator produces binary expression
  2085. constructs which will produce the appropriate SQL, both for
  2086. SELECT statements::
  2087. select(mytable.c.data[5], mytable.c.data[2:7])
  2088. as well as UPDATE statements when the :meth:`_expression.Update.values`
  2089. method
  2090. is used::
  2091. mytable.update().values({
  2092. mytable.c.data[5]: 7,
  2093. mytable.c.data[2:7]: [1, 2, 3]
  2094. })
  2095. The :class:`_types.ARRAY` type also provides for the operators
  2096. :meth:`.types.ARRAY.Comparator.any` and
  2097. :meth:`.types.ARRAY.Comparator.all`. The PostgreSQL-specific version of
  2098. :class:`_types.ARRAY` also provides additional operators.
  2099. .. versionadded:: 1.1.0
  2100. .. seealso::
  2101. :class:`sqlalchemy.dialects.postgresql.ARRAY`
  2102. """
  2103. __visit_name__ = "ARRAY"
  2104. _is_array = True
  2105. zero_indexes = False
  2106. """If True, Python zero-based indexes should be interpreted as one-based
  2107. on the SQL expression side."""
  2108. class Comparator(Indexable.Comparator, Concatenable.Comparator):
  2109. """Define comparison operations for :class:`_types.ARRAY`.
  2110. More operators are available on the dialect-specific form
  2111. of this type. See :class:`.postgresql.ARRAY.Comparator`.
  2112. """
  2113. def _setup_getitem(self, index):
  2114. if isinstance(index, slice):
  2115. return_type = self.type
  2116. if self.type.zero_indexes:
  2117. index = slice(index.start + 1, index.stop + 1, index.step)
  2118. slice_ = Slice(
  2119. index.start, index.stop, index.step, _name=self.expr.key
  2120. )
  2121. return operators.getitem, slice_, return_type
  2122. else:
  2123. if self.type.zero_indexes:
  2124. index += 1
  2125. if self.type.dimensions is None or self.type.dimensions == 1:
  2126. return_type = self.type.item_type
  2127. else:
  2128. adapt_kw = {"dimensions": self.type.dimensions - 1}
  2129. return_type = self.type.adapt(
  2130. self.type.__class__, **adapt_kw
  2131. )
  2132. return operators.getitem, index, return_type
  2133. def contains(self, *arg, **kw):
  2134. raise NotImplementedError(
  2135. "ARRAY.contains() not implemented for the base "
  2136. "ARRAY type; please use the dialect-specific ARRAY type"
  2137. )
  2138. @util.preload_module("sqlalchemy.sql.elements")
  2139. def any(self, other, operator=None):
  2140. """Return ``other operator ANY (array)`` clause.
  2141. Argument places are switched, because ANY requires array
  2142. expression to be on the right hand-side.
  2143. E.g.::
  2144. from sqlalchemy.sql import operators
  2145. conn.execute(
  2146. select(table.c.data).where(
  2147. table.c.data.any(7, operator=operators.lt)
  2148. )
  2149. )
  2150. :param other: expression to be compared
  2151. :param operator: an operator object from the
  2152. :mod:`sqlalchemy.sql.operators`
  2153. package, defaults to :func:`.operators.eq`.
  2154. .. seealso::
  2155. :func:`_expression.any_`
  2156. :meth:`.types.ARRAY.Comparator.all`
  2157. """
  2158. elements = util.preloaded.sql_elements
  2159. operator = operator if operator else operators.eq
  2160. # send plain BinaryExpression so that negate remains at None,
  2161. # leading to NOT expr for negation.
  2162. return elements.BinaryExpression(
  2163. coercions.expect(roles.ExpressionElementRole, other),
  2164. elements.CollectionAggregate._create_any(self.expr),
  2165. operator,
  2166. )
  2167. @util.preload_module("sqlalchemy.sql.elements")
  2168. def all(self, other, operator=None):
  2169. """Return ``other operator ALL (array)`` clause.
  2170. Argument places are switched, because ALL requires array
  2171. expression to be on the right hand-side.
  2172. E.g.::
  2173. from sqlalchemy.sql import operators
  2174. conn.execute(
  2175. select(table.c.data).where(
  2176. table.c.data.all(7, operator=operators.lt)
  2177. )
  2178. )
  2179. :param other: expression to be compared
  2180. :param operator: an operator object from the
  2181. :mod:`sqlalchemy.sql.operators`
  2182. package, defaults to :func:`.operators.eq`.
  2183. .. seealso::
  2184. :func:`_expression.all_`
  2185. :meth:`.types.ARRAY.Comparator.any`
  2186. """
  2187. elements = util.preloaded.sql_elements
  2188. operator = operator if operator else operators.eq
  2189. # send plain BinaryExpression so that negate remains at None,
  2190. # leading to NOT expr for negation.
  2191. return elements.BinaryExpression(
  2192. coercions.expect(roles.ExpressionElementRole, other),
  2193. elements.CollectionAggregate._create_all(self.expr),
  2194. operator,
  2195. )
  2196. comparator_factory = Comparator
  2197. def __init__(
  2198. self, item_type, as_tuple=False, dimensions=None, zero_indexes=False
  2199. ):
  2200. """Construct an :class:`_types.ARRAY`.
  2201. E.g.::
  2202. Column('myarray', ARRAY(Integer))
  2203. Arguments are:
  2204. :param item_type: The data type of items of this array. Note that
  2205. dimensionality is irrelevant here, so multi-dimensional arrays like
  2206. ``INTEGER[][]``, are constructed as ``ARRAY(Integer)``, not as
  2207. ``ARRAY(ARRAY(Integer))`` or such.
  2208. :param as_tuple=False: Specify whether return results
  2209. should be converted to tuples from lists. This parameter is
  2210. not generally needed as a Python list corresponds well
  2211. to a SQL array.
  2212. :param dimensions: if non-None, the ARRAY will assume a fixed
  2213. number of dimensions. This impacts how the array is declared
  2214. on the database, how it goes about interpreting Python and
  2215. result values, as well as how expression behavior in conjunction
  2216. with the "getitem" operator works. See the description at
  2217. :class:`_types.ARRAY` for additional detail.
  2218. :param zero_indexes=False: when True, index values will be converted
  2219. between Python zero-based and SQL one-based indexes, e.g.
  2220. a value of one will be added to all index values before passing
  2221. to the database.
  2222. """
  2223. if isinstance(item_type, ARRAY):
  2224. raise ValueError(
  2225. "Do not nest ARRAY types; ARRAY(basetype) "
  2226. "handles multi-dimensional arrays of basetype"
  2227. )
  2228. if isinstance(item_type, type):
  2229. item_type = item_type()
  2230. self.item_type = item_type
  2231. self.as_tuple = as_tuple
  2232. self.dimensions = dimensions
  2233. self.zero_indexes = zero_indexes
    @property
    def hashable(self):
        """ARRAY values are hashable only when results are returned as
        tuples (``as_tuple=True``); plain lists are unhashable."""
        return self.as_tuple
    @property
    def python_type(self):
        """The canonical Python type for an ARRAY value is ``list``."""
        return list
    def compare_values(self, x, y):
        """Compare two array values for equality using plain ``==``."""
        return x == y
    def _set_parent(self, column, outer=False, **kw):
        """Support SchemaEventTarget"""

        # forward the event to the contained item type (itself possibly a
        # SchemaEventTarget, e.g. a type that attaches objects to the parent
        # column/table); ``outer`` guards against doing so twice when called
        # via _set_parent_with_dispatch below
        if not outer and isinstance(self.item_type, SchemaEventTarget):
            self.item_type._set_parent(column, **kw)
    def _set_parent_with_dispatch(self, parent):
        """Support SchemaEventTarget"""

        # outer=True so that the base implementation does not also forward
        # to item_type; the dispatching variant is invoked on it directly
        super(ARRAY, self)._set_parent_with_dispatch(parent, outer=True)

        if isinstance(self.item_type, SchemaEventTarget):
            self.item_type._set_parent_with_dispatch(parent)
  2251. class TupleType(TypeEngine):
  2252. """represent the composite type of a Tuple."""
  2253. _is_tuple_type = True
  2254. def __init__(self, *types):
  2255. self._fully_typed = NULLTYPE not in types
  2256. self.types = types
  2257. def _resolve_values_to_types(self, value):
  2258. if self._fully_typed:
  2259. return self
  2260. else:
  2261. return TupleType(
  2262. *[
  2263. _resolve_value_to_type(elem) if typ is NULLTYPE else typ
  2264. for typ, elem in zip(self.types, value)
  2265. ]
  2266. )
  2267. def result_processor(self, dialect, coltype):
  2268. raise NotImplementedError(
  2269. "The tuple type does not support being fetched "
  2270. "as a column in a result row."
  2271. )
class REAL(Float):
    """The SQL REAL type."""

    # selects the compiler's visit_REAL method for DDL/cast rendering
    __visit_name__ = "REAL"
class FLOAT(Float):
    """The SQL FLOAT type."""

    # selects the compiler's visit_FLOAT method for DDL/cast rendering
    __visit_name__ = "FLOAT"
class NUMERIC(Numeric):
    """The SQL NUMERIC type."""

    # selects the compiler's visit_NUMERIC method for DDL/cast rendering
    __visit_name__ = "NUMERIC"
class DECIMAL(Numeric):
    """The SQL DECIMAL type."""

    # selects the compiler's visit_DECIMAL method for DDL/cast rendering
    __visit_name__ = "DECIMAL"
class INTEGER(Integer):
    """The SQL INT or INTEGER type."""

    # selects the compiler's visit_INTEGER method for DDL/cast rendering
    __visit_name__ = "INTEGER"


# ``INT`` is the common SQL synonym for INTEGER; alias the class directly.
INT = INTEGER
class SMALLINT(SmallInteger):
    """The SQL SMALLINT type."""

    # selects the compiler's visit_SMALLINT method for DDL/cast rendering
    __visit_name__ = "SMALLINT"
class BIGINT(BigInteger):
    """The SQL BIGINT type."""

    # selects the compiler's visit_BIGINT method for DDL/cast rendering
    __visit_name__ = "BIGINT"
class TIMESTAMP(DateTime):
    """The SQL TIMESTAMP type.

    :class:`_types.TIMESTAMP` datatypes have support for timezone
    storage on some backends, such as PostgreSQL and Oracle.  Use the
    :paramref:`~types.TIMESTAMP.timezone` argument in order to enable
    "TIMESTAMP WITH TIMEZONE" for these backends.

    """

    __visit_name__ = "TIMESTAMP"

    def __init__(self, timezone=False):
        """Construct a new :class:`_types.TIMESTAMP`.

        :param timezone: boolean.  Indicates that the TIMESTAMP type should
         enable timezone support, if available on the target database.
         On a per-dialect basis is similar to "TIMESTAMP WITH TIMEZONE".
         If the target database does not support timezones, this flag is
         ignored.

        """
        super(TIMESTAMP, self).__init__(timezone=timezone)

    def get_dbapi_type(self, dbapi):
        # return the DBAPI module's TIMESTAMP type indicator for this type
        return dbapi.TIMESTAMP
class DATETIME(DateTime):
    """The SQL DATETIME type."""

    # selects the compiler's visit_DATETIME method for DDL/cast rendering
    __visit_name__ = "DATETIME"
class DATE(Date):
    """The SQL DATE type."""

    # selects the compiler's visit_DATE method for DDL/cast rendering
    __visit_name__ = "DATE"
class TIME(Time):
    """The SQL TIME type."""

    # selects the compiler's visit_TIME method for DDL/cast rendering
    __visit_name__ = "TIME"
class TEXT(Text):
    """The SQL TEXT type."""

    # selects the compiler's visit_TEXT method for DDL/cast rendering
    __visit_name__ = "TEXT"
class CLOB(Text):
    """The CLOB type.

    This type is found in Oracle and Informix.
    """

    # selects the compiler's visit_CLOB method for DDL/cast rendering
    __visit_name__ = "CLOB"
class VARCHAR(String):
    """The SQL VARCHAR type."""

    # selects the compiler's visit_VARCHAR method for DDL/cast rendering
    __visit_name__ = "VARCHAR"
class NVARCHAR(Unicode):
    """The SQL NVARCHAR type."""

    # selects the compiler's visit_NVARCHAR method for DDL/cast rendering
    __visit_name__ = "NVARCHAR"
class CHAR(String):
    """The SQL CHAR type."""

    # selects the compiler's visit_CHAR method for DDL/cast rendering
    __visit_name__ = "CHAR"
class NCHAR(Unicode):
    """The SQL NCHAR type."""

    # selects the compiler's visit_NCHAR method for DDL/cast rendering
    __visit_name__ = "NCHAR"
class BLOB(LargeBinary):
    """The SQL BLOB type."""

    # selects the compiler's visit_BLOB method for DDL/cast rendering
    __visit_name__ = "BLOB"
class BINARY(_Binary):
    """The SQL BINARY type."""

    # selects the compiler's visit_BINARY method for DDL/cast rendering
    __visit_name__ = "BINARY"
class VARBINARY(_Binary):
    """The SQL VARBINARY type."""

    # selects the compiler's visit_VARBINARY method for DDL/cast rendering
    __visit_name__ = "VARBINARY"
class BOOLEAN(Boolean):
    """The SQL BOOLEAN type."""

    # selects the compiler's visit_BOOLEAN method for DDL/cast rendering
    __visit_name__ = "BOOLEAN"
  2354. class NullType(TypeEngine):
  2355. """An unknown type.
  2356. :class:`.NullType` is used as a default type for those cases where
  2357. a type cannot be determined, including:
  2358. * During table reflection, when the type of a column is not recognized
  2359. by the :class:`.Dialect`
  2360. * When constructing SQL expressions using plain Python objects of
  2361. unknown types (e.g. ``somecolumn == my_special_object``)
  2362. * When a new :class:`_schema.Column` is created,
  2363. and the given type is passed
  2364. as ``None`` or is not passed at all.
  2365. The :class:`.NullType` can be used within SQL expression invocation
  2366. without issue, it just has no behavior either at the expression
  2367. construction level or at the bind-parameter/result processing level.
  2368. :class:`.NullType` will result in a :exc:`.CompileError` if the compiler
  2369. is asked to render the type itself, such as if it is used in a
  2370. :func:`.cast` operation or within a schema creation operation such as that
  2371. invoked by :meth:`_schema.MetaData.create_all` or the
  2372. :class:`.CreateTable`
  2373. construct.
  2374. """
  2375. __visit_name__ = "null"
  2376. _isnull = True
  2377. hashable = False
  2378. def literal_processor(self, dialect):
  2379. def process(value):
  2380. raise exc.CompileError(
  2381. "Don't know how to render literal SQL value: %r" % value
  2382. )
  2383. return process
  2384. class Comparator(TypeEngine.Comparator):
  2385. def _adapt_expression(self, op, other_comparator):
  2386. if isinstance(
  2387. other_comparator, NullType.Comparator
  2388. ) or not operators.is_commutative(op):
  2389. return op, self.expr.type
  2390. else:
  2391. return other_comparator._adapt_expression(op, self)
  2392. comparator_factory = Comparator
  2393. class TableValueType(HasCacheKey, TypeEngine):
  2394. """Refers to a table value type."""
  2395. _is_table_value = True
  2396. _traverse_internals = [
  2397. ("_elements", InternalTraversal.dp_clauseelement_list),
  2398. ]
  2399. def __init__(self, *elements):
  2400. self._elements = [
  2401. coercions.expect(roles.StrAsPlainColumnRole, elem)
  2402. for elem in elements
  2403. ]
class MatchType(Boolean):
    """Refers to the return type of the MATCH operator.

    As the :meth:`.ColumnOperators.match` is probably the most open-ended
    operator in generic SQLAlchemy Core, we can't assume the return type
    at SQL evaluation time, as MySQL returns a floating point, not a boolean,
    and other backends might do something different. So this type
    acts as a placeholder, currently subclassing :class:`.Boolean`.

    The type allows dialects to inject result-processing functionality
    if needed, and on MySQL will return floating-point values.

    .. versionadded:: 1.0.0

    """

    # no body: dialects subclass/replace this to adjust result processing
# module-wide singleton instances of the fundamental types; these are also
# back-assigned onto ``type_api`` at the bottom of this module
NULLTYPE = NullType()
BOOLEANTYPE = Boolean()
STRINGTYPE = String()
INTEGERTYPE = Integer()
MATCHTYPE = MatchType()
TABLEVALUE = TableValueType()
# maps plain Python types to default SQLAlchemy types; consulted by
# _resolve_value_to_type() when inferring the type of a literal value
_type_map = {
    int: Integer(),
    float: Float(),
    bool: BOOLEANTYPE,
    decimal.Decimal: Numeric(),
    dt.date: Date(),
    dt.datetime: DateTime(),
    dt.time: Time(),
    dt.timedelta: Interval(),
    util.NoneType: NULLTYPE,
}

# string/bytes mappings differ between Python 2 and 3
if util.py3k:
    _type_map[bytes] = LargeBinary()  # noqa
    _type_map[str] = Unicode()
else:
    _type_map[unicode] = Unicode()  # noqa
    _type_map[str] = String()

# bound-method alias; saves an attribute lookup on each call
_type_map_get = _type_map.get
  2439. def _resolve_value_to_type(value):
  2440. _result_type = _type_map_get(type(value), False)
  2441. if _result_type is False:
  2442. # use inspect() to detect SQLAlchemy built-in
  2443. # objects.
  2444. insp = inspection.inspect(value, False)
  2445. if (
  2446. insp is not None
  2447. and
  2448. # foil mock.Mock() and other impostors by ensuring
  2449. # the inspection target itself self-inspects
  2450. insp.__class__ in inspection._registrars
  2451. ):
  2452. raise exc.ArgumentError(
  2453. "Object %r is not legal as a SQL literal value" % value
  2454. )
  2455. return NULLTYPE
  2456. else:
  2457. return _result_type
# back-assign to type_api
# NOTE(review): type_api appears to declare these as placeholders that are
# populated here — presumably to break an import cycle between the two
# modules; confirm against sqlalchemy/sql/type_api.py
type_api.BOOLEANTYPE = BOOLEANTYPE
type_api.STRINGTYPE = STRINGTYPE
type_api.INTEGERTYPE = INTEGERTYPE
type_api.NULLTYPE = NULLTYPE
type_api.MATCHTYPE = MATCHTYPE
type_api.INDEXABLE = Indexable
type_api.TABLEVALUE = TABLEVALUE
type_api._resolve_value_to_type = _resolve_value_to_type
TypeEngine.Comparator.BOOLEANTYPE = BOOLEANTYPE