Coverage for /home/martinb/.local/share/virtualenvs/camcops/lib/python3.6/site-packages/sqlalchemy/sql/sqltypes.py : 44%

Hot-keys on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
1# sql/sqltypes.py
2# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
3# <see AUTHORS file>
4#
5# This module is part of SQLAlchemy and is released under
6# the MIT License: http://www.opensource.org/licenses/mit-license.php
8"""SQL specific types.
10"""
12import codecs
13import datetime as dt
14import decimal
15import json
17from . import elements
18from . import operators
19from . import type_api
20from .base import _bind_or_error
21from .base import NO_ARG
22from .base import SchemaEventTarget
23from .elements import _defer_name
24from .elements import _literal_as_binds
25from .elements import quoted_name
26from .elements import Slice
27from .elements import TypeCoerce as type_coerce # noqa
28from .type_api import Emulated
29from .type_api import NativeForEmulated # noqa
30from .type_api import to_instance
31from .type_api import TypeDecorator
32from .type_api import TypeEngine
33from .type_api import Variant
34from .. import event
35from .. import exc
36from .. import inspection
37from .. import processors
38from .. import util
39from ..util import compat
40from ..util import pickle
43if util.jython:
44 import array
class _LookupExpressionAdapter(object):
    """Mixin that adapts binary-expression result types via lookup tables.

    The numeric, integer and date/time types use this mechanism to express
    their detailed cross-type coercion rules declaratively.

    """

    @property
    def _expression_adaptations(self):
        # Subclasses supply a {operator: {operand affinity: result type}} map.
        raise NotImplementedError()

    class Comparator(TypeEngine.Comparator):
        _blank_dict = util.immutabledict()

        def _adapt_expression(self, op, other_comparator):
            # Resolve the result type of (self.type <op> other type).
            right_affinity = other_comparator.type._type_affinity
            per_op = self.type._expression_adaptations.get(
                op, self._blank_dict
            )
            resolved = per_op.get(right_affinity, self.type)
            if resolved is right_affinity:
                # Result takes on the right-hand operand's type.
                return op, other_comparator.type
            if resolved is self.type._type_affinity:
                # Result keeps our own (left-hand) type.
                return op, self.type
            # Otherwise the table named an explicit type or type class.
            return op, to_instance(resolved)

    comparator_factory = Comparator
class Concatenable(object):
    """A mixin that marks a type as supporting 'concatenation',
    typically strings."""

    class Comparator(TypeEngine.Comparator):
        def _adapt_expression(self, op, other_comparator):
            # "+" between two concatenation-capable (or NULL-typed)
            # operands means string concatenation, not addition.
            other_is_concatenable = isinstance(
                other_comparator,
                (Concatenable.Comparator, NullType.Comparator),
            )
            if op is operators.add and other_is_concatenable:
                return operators.concat_op, self.expr.type
            return super(Concatenable.Comparator, self)._adapt_expression(
                op, other_comparator
            )

    comparator_factory = Comparator
class Indexable(object):
    """A mixin that marks a type as supporting indexing operations,
    such as array or JSON structures.

    .. versionadded:: 1.1.0

    """

    class Comparator(TypeEngine.Comparator):
        def _setup_getitem(self, index):
            # Concrete types translate the raw index into
            # (operator, coerced right-hand expression, result type).
            raise NotImplementedError()

        def __getitem__(self, index):
            op, rhs, result_type = self._setup_getitem(index)
            return self.operate(op, rhs, result_type=result_type)

    comparator_factory = Comparator
class String(Concatenable, TypeEngine):
    """The base for all string and character types.

    In SQL, corresponds to VARCHAR. Can also take Python unicode objects
    and encode to the database's encoding in bind params (and the reverse for
    result sets.)

    The `length` field is usually required when the `String` type is
    used within a CREATE TABLE statement, as VARCHAR requires a length
    on most databases.

    """

    __visit_name__ = "string"

    @util.deprecated_params(
        convert_unicode=(
            "1.3",
            "The :paramref:`.String.convert_unicode` parameter is deprecated "
            "and will be removed in a future release. All modern DBAPIs "
            "now support Python Unicode directly and this parameter is "
            "unnecessary.",
        ),
        unicode_error=(
            "1.3",
            "The :paramref:`.String.unicode_errors` parameter is deprecated "
            "and will be removed in a future release. This parameter is "
            "unnecessary for modern Python DBAPIs and degrades performance "
            "significantly.",
        ),
    )
    def __init__(
        self,
        length=None,
        collation=None,
        convert_unicode=False,
        unicode_error=None,
        _warn_on_bytestring=False,
        _expect_unicode=False,
    ):
        """
        Create a string-holding type.

        :param length: optional, a length for the column for use in
          DDL and CAST expressions. May be safely omitted if no ``CREATE
          TABLE`` will be issued. Certain databases may require a
          ``length`` for use in DDL, and will raise an exception when
          the ``CREATE TABLE`` DDL is issued if a ``VARCHAR``
          with no length is included. Whether the value is
          interpreted as bytes or characters is database specific.

        :param collation: Optional, a column-level collation for
          use in DDL and CAST expressions. Renders using the
          COLLATE keyword supported by SQLite, MySQL, and PostgreSQL.
          E.g.::

            >>> from sqlalchemy import cast, select, String
            >>> print(select([cast('some string', String(collation='utf8'))]))
            SELECT CAST(:param_1 AS VARCHAR COLLATE utf8) AS anon_1

        :param convert_unicode: When set to ``True``, the
          :class:`.String` type will assume that
          input is to be passed as Python Unicode objects under Python 2,
          and results returned as Python Unicode objects.
          In the rare circumstance that the DBAPI does not support
          Python unicode under Python 2, SQLAlchemy will use its own
          encoder/decoder functionality on strings, referring to the
          value of the :paramref:`_sa.create_engine.encoding` parameter
          parameter passed to :func:`_sa.create_engine` as the encoding.

          For the extremely rare case that Python Unicode
          is to be encoded/decoded by SQLAlchemy on a backend
          that *does* natively support Python Unicode,
          the string value ``"force"`` can be passed here which will
          cause SQLAlchemy's encode/decode services to be
          used unconditionally.

          .. note::

            SQLAlchemy's unicode-conversion flags and features only apply
            to Python 2; in Python 3, all string objects are Unicode objects.
            For this reason, as well as the fact that virtually all modern
            DBAPIs now support Unicode natively even under Python 2,
            the :paramref:`.String.convert_unicode` flag is inherently a
            legacy feature.

          .. note::

            In the vast majority of cases, the :class:`.Unicode` or
            :class:`.UnicodeText` datatypes should be used for a
            :class:`_schema.Column` that expects to store non-ascii data.
            These
            datatypes will ensure that the correct types are used on the
            database side as well as set up the correct Unicode behaviors
            under Python 2.

          .. seealso::

            :paramref:`_sa.create_engine.convert_unicode` -
            :class:`_engine.Engine`-wide parameter

        :param unicode_error: Optional, a method to use to handle Unicode
          conversion errors. Behaves like the ``errors`` keyword argument to
          the standard library's ``string.decode()`` functions, requires
          that :paramref:`.String.convert_unicode` is set to
          ``"force"``

        """
        # unicode_error only takes effect when SQLAlchemy itself performs
        # the encode step, i.e. convert_unicode="force".
        if unicode_error is not None and convert_unicode != "force":
            raise exc.ArgumentError(
                "convert_unicode must be 'force' " "when unicode_error is set."
            )

        self.length = length
        self.collation = collation
        # _expect_unicode is the internal, non-deprecated spelling of
        # convert_unicode, set directly by Unicode/UnicodeText subclasses.
        self._expect_unicode = convert_unicode or _expect_unicode
        self._expect_unicode_error = unicode_error

        self._warn_on_bytestring = _warn_on_bytestring

    def literal_processor(self, dialect):
        # Render a Python string as an inline SQL string literal.
        def process(value):
            # Standard SQL escaping: double up embedded single quotes.
            value = value.replace("'", "''")

            # Escape "%" for drivers whose paramstyle treats it specially.
            if dialect.identifier_preparer._double_percents:
                value = value.replace("%", "%%")

            return "'%s'" % value

        return process

    def bind_processor(self, dialect):
        # Return a bound-parameter conversion function, or None when the
        # DBAPI can accept the Python value directly.
        if self._expect_unicode or dialect.convert_unicode:
            if (
                dialect.supports_unicode_binds
                and self._expect_unicode != "force"
            ):
                # The DBAPI handles unicode itself; optionally warn when
                # the application passes raw bytestrings anyway.
                if self._warn_on_bytestring:

                    def process(value):
                        if isinstance(value, util.binary_type):
                            util.warn_limited(
                                "Unicode type received non-unicode "
                                "bind param value %r.",
                                (util.ellipses_string(value),),
                            )
                        return value

                    return process
                else:
                    return None
            else:
                # SQLAlchemy performs the encode step itself, using the
                # engine-wide encoding and configured error handler.
                encoder = codecs.getencoder(dialect.encoding)
                warn_on_bytestring = self._warn_on_bytestring

                def process(value):
                    if isinstance(value, util.text_type):
                        return encoder(value, self._expect_unicode_error)[0]
                    elif warn_on_bytestring and value is not None:
                        util.warn_limited(
                            "Unicode type received non-unicode bind "
                            "param value %r.",
                            (util.ellipses_string(value),),
                        )
                    return value

                return process
        else:
            return None

    def result_processor(self, dialect, coltype):
        # Decide whether result-row values need str -> unicode decoding.
        wants_unicode = self._expect_unicode or dialect.convert_unicode
        needs_convert = wants_unicode and (
            dialect.returns_unicode_strings is not True
            or self._expect_unicode in ("force", "force_nocheck")
        )
        # When the DBAPI may return either str or unicode, use the
        # conditional processor that isinstance-checks each value.
        needs_isinstance = (
            needs_convert
            and dialect.returns_unicode_strings
            and self._expect_unicode != "force_nocheck"
        )
        if needs_convert:
            if needs_isinstance:
                return processors.to_conditional_unicode_processor_factory(
                    dialect.encoding, self._expect_unicode_error
                )
            else:
                return processors.to_unicode_processor_factory(
                    dialect.encoding, self._expect_unicode_error
                )
        else:
            return None

    @property
    def python_type(self):
        if self._expect_unicode:
            return util.text_type
        else:
            return str

    def get_dbapi_type(self, dbapi):
        return dbapi.STRING

    @classmethod
    def _warn_deprecated_unicode(cls):
        # Shared helper used whenever legacy unicode-conversion paths
        # are exercised.
        util.warn_deprecated(
            "The convert_unicode on Engine and String as well as the "
            "unicode_error flag on String are deprecated. All modern "
            "DBAPIs now support Python Unicode natively under Python 2, and "
            "under Python 3 all strings are inherently Unicode. These flags "
            "will be removed in a future release."
        )
class Text(String):
    """A variably sized string type.

    In SQL, usually corresponds to CLOB or TEXT. Can also take Python
    unicode objects and encode to the database's encoding in bind
    params (and the reverse for result sets.) In general, TEXT objects
    do not have a length; while some databases will accept a length
    argument here, it will be rejected by others.

    """

    # Compiler hook: dispatches to visit_text().
    __visit_name__ = "text"
class Unicode(String):
    """A variable length Unicode string type.

    The :class:`.Unicode` type is a :class:`.String` subclass
    that assumes input and output as Python ``unicode`` data,
    and in that regard is equivalent to the usage of the
    ``convert_unicode`` flag with the :class:`.String` type.
    However, unlike plain :class:`.String`, it also implies an
    underlying column type that is explicitly supporting of non-ASCII
    data, such as ``NVARCHAR`` on Oracle and SQL Server.
    This can impact the output of ``CREATE TABLE`` statements
    and ``CAST`` functions at the dialect level, and can
    also affect the handling of bound parameters in some
    specific DBAPI scenarios.

    The encoding used by the :class:`.Unicode` type is usually
    determined by the DBAPI itself; most modern DBAPIs
    feature support for Python ``unicode`` objects as bound
    values and result set values, and the encoding should
    be configured as detailed in the notes for the target
    DBAPI in the :ref:`dialect_toplevel` section.

    For those DBAPIs which do not support, or are not configured
    to accommodate Python ``unicode`` objects
    directly, SQLAlchemy does the encoding and decoding
    outside of the DBAPI. The encoding in this scenario
    is determined by the ``encoding`` flag passed to
    :func:`_sa.create_engine`.

    When using the :class:`.Unicode` type, it is only appropriate
    to pass Python ``unicode`` objects, and not plain ``str``.
    If a plain ``str`` is passed under Python 2, a warning
    is emitted. If you notice your application emitting these warnings but
    you're not sure of the source of them, the Python
    ``warnings`` filter, documented at
    http://docs.python.org/library/warnings.html,
    can be used to turn these warnings into exceptions
    which will illustrate a stack trace::

      import warnings
      warnings.simplefilter('error')

    For an application that wishes to pass plain bytestrings
    and Python ``unicode`` objects to the ``Unicode`` type
    equally, the bytestrings must first be decoded into
    unicode. The recipe at :ref:`coerce_to_unicode` illustrates
    how this is done.

    .. seealso::

        :class:`.UnicodeText` - unlengthed textual counterpart
        to :class:`.Unicode`.

    """

    __visit_name__ = "unicode"

    def __init__(self, length=None, **kwargs):
        """
        Create a :class:`.Unicode` object.

        Parameters are the same as that of :class:`.String`,
        with the exception that ``convert_unicode``
        defaults to ``True``.

        """
        # setdefault() so that explicit caller-supplied values win over
        # the Unicode-specific defaults.
        kwargs.setdefault("_expect_unicode", True)
        kwargs.setdefault("_warn_on_bytestring", True)
        super(Unicode, self).__init__(length=length, **kwargs)
class UnicodeText(Text):
    """An unbounded-length Unicode string type.

    See :class:`.Unicode` for details on the unicode
    behavior of this object.

    Like :class:`.Unicode`, usage of the :class:`.UnicodeText` type implies a
    unicode-capable type being used on the backend, such as
    ``NCLOB``, ``NTEXT``.

    """

    __visit_name__ = "unicode_text"

    def __init__(self, length=None, **kwargs):
        """
        Create a Unicode-converting Text type.

        Parameters are the same as that of :class:`_expression.TextClause`,
        with the exception that ``convert_unicode``
        defaults to ``True``.

        """
        # setdefault() so that explicit caller-supplied values win over
        # the Unicode-specific defaults.
        kwargs.setdefault("_expect_unicode", True)
        kwargs.setdefault("_warn_on_bytestring", True)
        super(UnicodeText, self).__init__(length=length, **kwargs)

    def _warn_deprecated_unicode(self):
        # Unicode conversion is intrinsic to this type; suppress the
        # String-level deprecation warning.
        pass
class Integer(_LookupExpressionAdapter, TypeEngine):
    """A type for ``int`` integers."""

    __visit_name__ = "integer"

    def get_dbapi_type(self, dbapi):
        return dbapi.NUMBER

    @property
    def python_type(self):
        return int

    def literal_processor(self, dialect):
        # Integers render inline simply as their string form.
        def render(value):
            return str(value)

        return render

    @util.memoized_property
    def _expression_adaptations(self):
        # TODO: need a dictionary object that will
        # handle operators generically here, this is incomplete
        same = self.__class__
        return {
            operators.add: {Date: Date, Integer: same, Numeric: Numeric},
            operators.mul: {
                Interval: Interval,
                Integer: same,
                Numeric: Numeric,
            },
            operators.div: {Integer: same, Numeric: Numeric},
            operators.truediv: {Integer: same, Numeric: Numeric},
            operators.sub: {Integer: same, Numeric: Numeric},
        }
class SmallInteger(Integer):
    """A type for smaller ``int`` integers.

    Typically generates a ``SMALLINT`` in DDL, and otherwise acts like
    a normal :class:`.Integer` on the Python side.

    """

    # Compiler hook: renders as SMALLINT by default.
    __visit_name__ = "small_integer"
class BigInteger(Integer):
    """A type for bigger ``int`` integers.

    Typically generates a ``BIGINT`` in DDL, and otherwise acts like
    a normal :class:`.Integer` on the Python side.

    """

    # Compiler hook: renders as BIGINT by default.
    __visit_name__ = "big_integer"
class Numeric(_LookupExpressionAdapter, TypeEngine):
    """A type for fixed precision numbers, such as ``NUMERIC`` or ``DECIMAL``.

    This type returns Python ``decimal.Decimal`` objects by default, unless
    the :paramref:`.Numeric.asdecimal` flag is set to False, in which case
    they are coerced to Python ``float`` objects.

    .. note::

        The :class:`.Numeric` type is designed to receive data from a database
        type that is explicitly known to be a decimal type
        (e.g. ``DECIMAL``, ``NUMERIC``, others) and not a floating point
        type (e.g. ``FLOAT``, ``REAL``, others).
        If the database column on the server is in fact a floating-point
        type, such as ``FLOAT`` or ``REAL``, use the :class:`.Float`
        type or a subclass, otherwise numeric coercion between
        ``float``/``Decimal`` may or may not function as expected.

    .. note::

        The Python ``decimal.Decimal`` class is generally slow
        performing; cPython 3.3 has now switched to use the `cdecimal
        <http://pypi.python.org/pypi/cdecimal/>`_ library natively. For
        older Python versions, the ``cdecimal`` library can be patched
        into any application where it will replace the ``decimal``
        library fully, however this needs to be applied globally and
        before any other modules have been imported, as follows::

            import sys
            import cdecimal
            sys.modules["decimal"] = cdecimal

        Note that the ``cdecimal`` and ``decimal`` libraries are **not
        compatible with each other**, so patching ``cdecimal`` at the
        global level is the only way it can be used effectively with
        various DBAPIs that hardcode to import the ``decimal`` library.

    """

    __visit_name__ = "numeric"

    # Number of decimal places retained when converting float -> Decimal
    # and neither scale nor decimal_return_scale was specified.
    _default_decimal_return_scale = 10

    def __init__(
        self,
        precision=None,
        scale=None,
        decimal_return_scale=None,
        asdecimal=True,
    ):
        """
        Construct a Numeric.

        :param precision: the numeric precision for use in DDL ``CREATE
          TABLE``.

        :param scale: the numeric scale for use in DDL ``CREATE TABLE``.

        :param asdecimal: default True. Return whether or not
          values should be sent as Python Decimal objects, or
          as floats. Different DBAPIs send one or the other based on
          datatypes - the Numeric type will ensure that return values
          are one or the other across DBAPIs consistently.

        :param decimal_return_scale: Default scale to use when converting
          from floats to Python decimals. Floating point values will typically
          be much longer due to decimal inaccuracy, and most floating point
          database types don't have a notion of "scale", so by default the
          float type looks for the first ten decimal places when converting.
          Specifying this value will override that length. Types which
          do include an explicit ".scale" value, such as the base
          :class:`.Numeric` as well as the MySQL float types, will use the
          value of ".scale" as the default for decimal_return_scale, if not
          otherwise specified.

          .. versionadded:: 0.9.0

        When using the ``Numeric`` type, care should be taken to ensure
        that the asdecimal setting is appropriate for the DBAPI in use -
        when Numeric applies a conversion from Decimal->float or float->
        Decimal, this conversion incurs an additional performance overhead
        for all result columns received.

        DBAPIs that return Decimal natively (e.g. psycopg2) will have
        better accuracy and higher performance with a setting of ``True``,
        as the native translation to Decimal reduces the amount of floating-
        point issues at play, and the Numeric type itself doesn't need
        to apply any further conversions. However, another DBAPI which
        returns floats natively *will* incur an additional conversion
        overhead, and is still subject to floating point data loss - in
        which case ``asdecimal=False`` will at least remove the extra
        conversion overhead.

        """
        self.precision = precision
        self.scale = scale
        self.decimal_return_scale = decimal_return_scale
        self.asdecimal = asdecimal

    @property
    def _effective_decimal_return_scale(self):
        # Precedence: explicit decimal_return_scale, then the type's own
        # scale (subclasses like Float may set scale=None), then default.
        if self.decimal_return_scale is not None:
            return self.decimal_return_scale
        elif getattr(self, "scale", None) is not None:
            return self.scale
        else:
            return self._default_decimal_return_scale

    def get_dbapi_type(self, dbapi):
        return dbapi.NUMBER

    def literal_processor(self, dialect):
        # Numbers render inline simply as their string form.
        def process(value):
            return str(value)

        return process

    @property
    def python_type(self):
        if self.asdecimal:
            return decimal.Decimal
        else:
            return float

    def bind_processor(self, dialect):
        if dialect.supports_native_decimal:
            return None
        else:
            # The DBAPI can't accept Decimal objects; send floats instead.
            return processors.to_float

    def result_processor(self, dialect, coltype):
        if self.asdecimal:
            if dialect.supports_native_decimal:
                # we're a "numeric", DBAPI will give us Decimal directly
                return None
            else:
                util.warn(
                    "Dialect %s+%s does *not* support Decimal "
                    "objects natively, and SQLAlchemy must "
                    "convert from floating point - rounding "
                    "errors and other issues may occur. Please "
                    "consider storing Decimal numbers as strings "
                    "or integers on this platform for lossless "
                    "storage." % (dialect.name, dialect.driver)
                )

                # we're a "numeric", DBAPI returns floats, convert.
                return processors.to_decimal_processor_factory(
                    decimal.Decimal,
                    self.scale
                    if self.scale is not None
                    else self._default_decimal_return_scale,
                )
        else:
            if dialect.supports_native_decimal:
                # Caller asked for floats but the DBAPI returns Decimal.
                return processors.to_float
            else:
                return None

    @util.memoized_property
    def _expression_adaptations(self):
        # See _LookupExpressionAdapter: operator -> operand affinity ->
        # resulting type for numeric arithmetic.
        return {
            operators.mul: {
                Interval: Interval,
                Numeric: self.__class__,
                Integer: self.__class__,
            },
            operators.div: {Numeric: self.__class__, Integer: self.__class__},
            operators.truediv: {
                Numeric: self.__class__,
                Integer: self.__class__,
            },
            operators.add: {Numeric: self.__class__, Integer: self.__class__},
            operators.sub: {Numeric: self.__class__, Integer: self.__class__},
        }
class Float(Numeric):
    """Type representing floating point types, such as ``FLOAT`` or ``REAL``.

    This type returns Python ``float`` objects by default, unless the
    :paramref:`.Float.asdecimal` flag is set to True, in which case they
    are coerced to ``decimal.Decimal`` objects.

    .. note::

        The :class:`.Float` type is designed to receive data from a database
        type that is explicitly known to be a floating point type
        (e.g. ``FLOAT``, ``REAL``, others)
        and not a decimal type (e.g. ``DECIMAL``, ``NUMERIC``, others).
        If the database column on the server is in fact a Numeric
        type, such as ``DECIMAL`` or ``NUMERIC``, use the :class:`.Numeric`
        type or a subclass, otherwise numeric coercion between
        ``float``/``Decimal`` may or may not function as expected.

    """

    __visit_name__ = "float"

    # Floats carry no fixed scale, so _effective_decimal_return_scale
    # falls through to decimal_return_scale or the module default.
    scale = None

    def __init__(
        self, precision=None, asdecimal=False, decimal_return_scale=None
    ):
        r"""
        Construct a Float.

        :param precision: the numeric precision for use in DDL ``CREATE
          TABLE``.

        :param asdecimal: the same flag as that of :class:`.Numeric`, but
          defaults to ``False``. Note that setting this flag to ``True``
          results in floating point conversion.

        :param decimal_return_scale: Default scale to use when converting
          from floats to Python decimals. Floating point values will typically
          be much longer due to decimal inaccuracy, and most floating point
          database types don't have a notion of "scale", so by default the
          float type looks for the first ten decimal places when converting.
          Specifying this value will override that length. Note that the
          MySQL float types, which do include "scale", will use "scale"
          as the default for decimal_return_scale, if not otherwise specified.

          .. versionadded:: 0.9.0

        """
        self.precision = precision
        self.asdecimal = asdecimal
        self.decimal_return_scale = decimal_return_scale

    def result_processor(self, dialect, coltype):
        # Guard-clause form: Decimal coercion first, then the
        # native-Decimal-to-float case, otherwise pass values through.
        if self.asdecimal:
            return processors.to_decimal_processor_factory(
                decimal.Decimal, self._effective_decimal_return_scale
            )
        if dialect.supports_native_decimal:
            return processors.to_float
        return None
class DateTime(_LookupExpressionAdapter, TypeEngine):
    """A type for ``datetime.datetime()`` objects.

    Date and time types return objects from the Python ``datetime``
    module. Most DBAPIs have built in support for the datetime
    module, with the noted exception of SQLite. In the case of
    SQLite, date and time types are stored as strings which are then
    converted back to datetime objects when rows are returned.

    For the time representation within the datetime type, some
    backends include additional options, such as timezone support and
    fractional seconds support. For fractional seconds, use the
    dialect-specific datatype, such as :class:`.mysql.TIME`. For
    timezone support, use at least the :class:`_types.TIMESTAMP` datatype,
    if not the dialect-specific datatype object.

    """

    __visit_name__ = "datetime"

    def __init__(self, timezone=False):
        """Construct a new :class:`.DateTime`.

        :param timezone: boolean. Indicates that the datetime type should
         enable timezone support, if available on the
         **base date/time-holding type only**. It is recommended
         to make use of the :class:`_types.TIMESTAMP` datatype directly when
         using this flag, as some databases include separate generic
         date/time-holding types distinct from the timezone-capable
         TIMESTAMP datatype, such as Oracle.

        """
        self.timezone = timezone

    def get_dbapi_type(self, dbapi):
        return dbapi.DATETIME

    @property
    def python_type(self):
        return dt.datetime

    @util.memoized_property
    def _expression_adaptations(self):

        # Based on http://www.postgresql.org/docs/current/\
        # static/functions-datetime.html.

        return {
            # datetime + interval = datetime
            operators.add: {Interval: self.__class__},
            # datetime - interval = datetime; datetime - datetime = interval
            operators.sub: {Interval: self.__class__, DateTime: Interval},
        }
class Date(_LookupExpressionAdapter, TypeEngine):
    """A type for ``datetime.date()`` objects."""

    __visit_name__ = "date"

    def get_dbapi_type(self, dbapi):
        return dbapi.DATETIME

    @property
    def python_type(self):
        return dt.date

    @util.memoized_property
    def _expression_adaptations(self):
        # Based on http://www.postgresql.org/docs/current/\
        # static/functions-datetime.html.

        return {
            operators.add: {
                # date + integer = date
                Integer: self.__class__,
                # date + interval / time = datetime
                Interval: DateTime,
                Time: DateTime,
            },
            operators.sub: {
                # date - integer = date
                Integer: self.__class__,
                # date - date = integer.
                Date: Integer,
                Interval: DateTime,
                # date - datetime = interval,
                # this one is not in the PG docs
                # but works
                DateTime: Interval,
            },
        }
class Time(_LookupExpressionAdapter, TypeEngine):
    """A type for ``datetime.time()`` objects."""

    __visit_name__ = "time"

    def __init__(self, timezone=False):
        # Mirrors DateTime: ``timezone`` requests a TZ-aware time column
        # where the backend supports one.
        self.timezone = timezone

    def get_dbapi_type(self, dbapi):
        return dbapi.DATETIME

    @property
    def python_type(self):
        return dt.time

    @util.memoized_property
    def _expression_adaptations(self):
        # Coercion table modeled on PostgreSQL's date/time operator
        # behavior (functions-datetime in the PG docs).
        own = self.__class__
        return {
            operators.add: {Date: DateTime, Interval: own},
            operators.sub: {Time: Interval, Interval: own},
        }
class _Binary(TypeEngine):
    """Define base behavior for binary types."""

    def __init__(self, length=None):
        # Optional length for DDL, e.g. BLOB(length) on MySQL.
        self.length = length

    def literal_processor(self, dialect):
        # Render bytes as an inline SQL string literal.
        def process(value):
            # Decode using the dialect encoding, then apply standard SQL
            # single-quote escaping.
            value = value.decode(dialect.encoding).replace("'", "''")
            return "'%s'" % value

        return process

    @property
    def python_type(self):
        return util.binary_type

    # Python 3 - sqlite3 doesn't need the `Binary` conversion
    # here, though pg8000 does to indicate "bytea"
    def bind_processor(self, dialect):
        if dialect.dbapi is None:
            return None

        DBAPIBinary = dialect.dbapi.Binary

        def process(value):
            if value is not None:
                # Wrap in the DBAPI's own binary marker type.
                return DBAPIBinary(value)
            else:
                return None

        return process

    # Python 3 has native bytes() type
    # both sqlite3 and pg8000 seem to return it,
    # psycopg2 as of 2.5 returns 'memoryview'
    if util.py2k:

        def result_processor(self, dialect, coltype):
            # Jython returns array.array for binary columns; normalize
            # everything to str on Python 2.
            if util.jython:

                def process(value):
                    if value is not None:
                        if isinstance(value, array.array):
                            return value.tostring()
                        return str(value)
                    else:
                        return None

            else:
                process = processors.to_str
            return process

    else:

        def result_processor(self, dialect, coltype):
            def process(value):
                if value is not None:
                    # Normalize memoryview/buffer-like results to bytes.
                    value = bytes(value)
                return value

            return process

    def coerce_compared_value(self, op, value):
        """See :meth:`.TypeEngine.coerce_compared_value` for a description."""

        # A string compared against a binary column keeps the binary type,
        # so it is bound using the binary bind processor.
        if isinstance(value, util.string_types):
            return self
        else:
            return super(_Binary, self).coerce_compared_value(op, value)

    def get_dbapi_type(self, dbapi):
        return dbapi.BINARY
class LargeBinary(_Binary):
    """A type for large binary byte data.

    The :class:`.LargeBinary` type corresponds to a large and/or unlengthed
    binary type for the target platform, such as BLOB on MySQL and BYTEA for
    PostgreSQL. It also handles the necessary conversions for the DBAPI.

    """

    __visit_name__ = "large_binary"

    def __init__(self, length=None):
        """
        Construct a LargeBinary type.

        :param length: optional, a length for the column for use in
          DDL statements, for those binary types that accept a length,
          such as the MySQL BLOB type.

        """
        super(LargeBinary, self).__init__(length=length)
@util.deprecated_cls(
    "0.6",
    # fixed typo in user-facing deprecation message: "relase" -> "release"
    "The :class:`.Binary` class is deprecated and will be removed "
    "in a future release. Please use :class:`.LargeBinary`.",
)
class Binary(LargeBinary):
    # Deprecated alias of LargeBinary, retained for backwards compatibility;
    # constructing it emits the deprecation warning via the decorator.
    def __init__(self, *arg, **kw):
        LargeBinary.__init__(self, *arg, **kw)
996class SchemaType(SchemaEventTarget):
998 """Mark a type as possibly requiring schema-level DDL for usage.
1000 Supports types that must be explicitly created/dropped (i.e. PG ENUM type)
1001 as well as types that are complimented by table or schema level
1002 constraints, triggers, and other rules.
1004 :class:`.SchemaType` classes can also be targets for the
1005 :meth:`.DDLEvents.before_parent_attach` and
1006 :meth:`.DDLEvents.after_parent_attach` events, where the events fire off
1007 surrounding the association of the type object with a parent
1008 :class:`_schema.Column`.
1010 .. seealso::
1012 :class:`.Enum`
1014 :class:`.Boolean`
1017 """
    def __init__(
        self,
        name=None,
        schema=None,
        metadata=None,
        inherit_schema=False,
        quote=None,
        _create_events=True,
    ):
        # ``quote`` forces or suppresses quoting of the name in DDL.
        if name is not None:
            self.name = quoted_name(name, quote)
        else:
            self.name = None
        self.schema = schema
        self.metadata = metadata
        self.inherit_schema = inherit_schema
        # _create_events=False is used by adapt() to produce copies that
        # do not re-register DDL event listeners.
        self._create_events = _create_events

        # When associated with a MetaData directly, emit this type's CREATE
        # before the metadata's tables are created and its DROP after they
        # are dropped.
        if _create_events and self.metadata:
            event.listen(
                self.metadata,
                "before_create",
                util.portable_instancemethod(self._on_metadata_create),
            )
            event.listen(
                self.metadata,
                "after_drop",
                util.portable_instancemethod(self._on_metadata_drop),
            )
1049 def _translate_schema(self, effective_schema, map_):
1050 return map_.get(effective_schema, effective_schema)
    def _set_parent(self, column):
        # Defer the real setup until the column is attached to a Table.
        column._on_table_attach(util.portable_instancemethod(self._set_table))
1055 def _variant_mapping_for_set_table(self, column):
1056 if isinstance(column.type, Variant):
1057 variant_mapping = column.type.mapping.copy()
1058 variant_mapping["_default"] = column.type.impl
1059 else:
1060 variant_mapping = None
1061 return variant_mapping
    def _set_table(self, column, table):
        # Optionally adopt the parent table's schema.
        if self.inherit_schema:
            self.schema = table.schema

        if not self._create_events:
            return

        variant_mapping = self._variant_mapping_for_set_table(column)

        # Emit this type's CREATE/DROP surrounding the table's own DDL.
        event.listen(
            table,
            "before_create",
            util.portable_instancemethod(
                self._on_table_create, {"variant_mapping": variant_mapping}
            ),
        )
        event.listen(
            table,
            "after_drop",
            util.portable_instancemethod(
                self._on_table_drop, {"variant_mapping": variant_mapping}
            ),
        )
        # Only hook metadata-level events here when no MetaData was given
        # to the constructor (which would have registered them already).
        if self.metadata is None:
            # TODO: what's the difference between self.metadata
            # and table.metadata here ?
            event.listen(
                table.metadata,
                "before_create",
                util.portable_instancemethod(
                    self._on_metadata_create,
                    {"variant_mapping": variant_mapping},
                ),
            )
            event.listen(
                table.metadata,
                "after_drop",
                util.portable_instancemethod(
                    self._on_metadata_drop,
                    {"variant_mapping": variant_mapping},
                ),
            )
    def copy(self, **kw):
        # NOTE: ``kw`` is accepted for interface compatibility but is not
        # passed through; the copy is a same-class adaptation that always
        # re-establishes DDL events.
        return self.adapt(self.__class__, _create_events=True)
1109 def adapt(self, impltype, **kw):
1110 schema = kw.pop("schema", self.schema)
1111 metadata = kw.pop("metadata", self.metadata)
1112 _create_events = kw.pop("_create_events", False)
1113 return impltype(
1114 name=self.name,
1115 schema=schema,
1116 inherit_schema=self.inherit_schema,
1117 metadata=metadata,
1118 _create_events=_create_events,
1119 **kw
1120 )
1122 @property
1123 def bind(self):
1124 return self.metadata and self.metadata.bind or None
    def create(self, bind=None, checkfirst=False):
        """Issue CREATE ddl for this type, if applicable."""

        if bind is None:
            bind = _bind_or_error(self)
        t = self.dialect_impl(bind.dialect)
        # Delegate only when the dialect supplies a distinct SchemaType
        # implementation (e.g. a native ENUM); otherwise no DDL is needed.
        if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
            t.create(bind=bind, checkfirst=checkfirst)
    def drop(self, bind=None, checkfirst=False):
        """Issue DROP ddl for this type, if applicable."""

        if bind is None:
            bind = _bind_or_error(self)
        t = self.dialect_impl(bind.dialect)
        # Mirror of create(): delegate to the dialect-specific SchemaType
        # implementation when one exists.
        if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
            t.drop(bind=bind, checkfirst=checkfirst)
1144 def _on_table_create(self, target, bind, **kw):
1145 if not self._is_impl_for_variant(bind.dialect, kw):
1146 return
1148 t = self.dialect_impl(bind.dialect)
1149 if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
1150 t._on_table_create(target, bind, **kw)
1152 def _on_table_drop(self, target, bind, **kw):
1153 if not self._is_impl_for_variant(bind.dialect, kw):
1154 return
1156 t = self.dialect_impl(bind.dialect)
1157 if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
1158 t._on_table_drop(target, bind, **kw)
1160 def _on_metadata_create(self, target, bind, **kw):
1161 if not self._is_impl_for_variant(bind.dialect, kw):
1162 return
1164 t = self.dialect_impl(bind.dialect)
1165 if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
1166 t._on_metadata_create(target, bind, **kw)
1168 def _on_metadata_drop(self, target, bind, **kw):
1169 if not self._is_impl_for_variant(bind.dialect, kw):
1170 return
1172 t = self.dialect_impl(bind.dialect)
1173 if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
1174 t._on_metadata_drop(target, bind, **kw)
1176 def _is_impl_for_variant(self, dialect, kw):
1177 variant_mapping = kw.pop("variant_mapping", None)
1178 if variant_mapping is None:
1179 return True
1181 if (
1182 dialect.name in variant_mapping
1183 and variant_mapping[dialect.name] is self
1184 ):
1185 return True
1186 elif dialect.name not in variant_mapping:
1187 return variant_mapping["_default"] is self
class Enum(Emulated, String, SchemaType):
    """Generic Enum Type.

    The :class:`.Enum` type provides a set of possible string values
    which the column is constrained towards.

    The :class:`.Enum` type will make use of the backend's native "ENUM"
    type if one is available; otherwise, it uses a VARCHAR datatype and
    produces a CHECK constraint.  Use of the backend-native enum type
    can be disabled using the :paramref:`.Enum.native_enum` flag, and
    the production of the CHECK constraint is configurable using the
    :paramref:`.Enum.create_constraint` flag.

    The :class:`.Enum` type also provides in-Python validation of string
    values during both read and write operations.  When reading a value
    from the database in a result set, the string value is always checked
    against the list of possible values and a ``LookupError`` is raised
    if no match is found.  When passing a value to the database as a
    plain string within a SQL statement, if the
    :paramref:`.Enum.validate_strings` parameter is
    set to True, a ``LookupError`` is raised for any string value that's
    not located in the given list of possible values; note that this
    impacts usage of LIKE expressions with enumerated values (an unusual
    use case).

    .. versionchanged:: 1.1 the :class:`.Enum` type now provides in-Python
       validation of input values as well as on data being returned by
       the database.

    The source of enumerated values may be a list of string values, or
    alternatively a PEP-435-compliant enumerated class.  For the purposes
    of the :class:`.Enum` datatype, this class need only provide a
    ``__members__`` method.

    When using an enumerated class, the enumerated objects are used
    both for input and output, rather than strings as is the case with
    a plain-string enumerated type::

        import enum
        class MyEnum(enum.Enum):
            one = 1
            two = 2
            three = 3

        t = Table(
            'data', MetaData(),
            Column('value', Enum(MyEnum))
        )

        connection.execute(t.insert(), {"value": MyEnum.two})
        assert connection.scalar(t.select()) is MyEnum.two

    Above, the string names of each element, e.g. "one", "two", "three",
    are persisted to the database; the values of the Python Enum, here
    indicated as integers, are **not** used; the value of each enum can
    therefore be any kind of Python object whether or not it is persistable.

    In order to persist the values and not the names, the
    :paramref:`.Enum.values_callable` parameter may be used.  The value of
    this parameter is a user-supplied callable, which is intended to be used
    with a PEP-435-compliant enumerated class and returns a list of string
    values to be persisted.  For a simple enumeration that uses string
    values, a callable such as ``lambda x: [e.value for e in x]`` is
    sufficient.

    .. versionadded:: 1.1 - support for PEP-435-style enumerated
       classes.


    .. seealso::

        :class:`_postgresql.ENUM` - PostgreSQL-specific type,
        which has additional functionality.

        :class:`.mysql.ENUM` - MySQL-specific type

    """

    __visit_name__ = "enum"

    @util.deprecated_params(
        convert_unicode=(
            "1.3",
            "The :paramref:`.Enum.convert_unicode` parameter is deprecated "
            "and will be removed in a future release.  All modern DBAPIs "
            "now support Python Unicode directly and this parameter is "
            "unnecessary.",
        )
    )
    def __init__(self, *enums, **kw):
        r"""Construct an enum.

        Keyword arguments which don't apply to a specific backend are ignored
        by that backend.

        :param \*enums: either exactly one PEP-435 compliant enumerated type
           or one or more string or unicode enumeration labels. If unicode
           labels are present, the `convert_unicode` flag is auto-enabled.

           .. versionadded:: 1.1 a PEP-435 style enumerated class may be
              passed.

        :param convert_unicode: Enable unicode-aware bind parameter and
           result-set processing for this Enum's data. This is set
           automatically based on the presence of unicode label strings.

        :param create_constraint: defaults to True.  When creating a
           non-native enumerated type, also build a CHECK constraint on the
           database against the valid values.

           .. versionadded:: 1.1 - added :paramref:`.Enum.create_constraint`
              which provides the option to disable the production of the
              CHECK constraint for a non-native enumerated type.

        :param metadata: Associate this type directly with a ``MetaData``
           object. For types that exist on the target database as an
           independent schema construct (PostgreSQL), this type will be
           created and dropped within ``create_all()`` and ``drop_all()``
           operations. If the type is not associated with any ``MetaData``
           object, it will associate itself with each ``Table`` in which it
           is used, and will be created when any of those individual tables
           are created, after a check is performed for its existence. The
           type is only dropped when ``drop_all()`` is called for that
           ``Table`` object's metadata, however.

        :param name: The name of this type. This is required for PostgreSQL
           and any future supported database which requires an explicitly
           named type, or an explicitly named constraint in order to generate
           the type and/or a table that uses it. If a PEP-435 enumerated
           class was used, its name (converted to lower case) is used by
           default.

        :param native_enum: Use the database's native ENUM type when
           available. Defaults to True. When False, uses VARCHAR + check
           constraint for all backends. The VARCHAR length can be controlled
           with :paramref:`.Enum.length`

        :param length: Allows specifying a custom length for the VARCHAR
           when :paramref:`.Enum.native_enum` is False. By default it uses
           the length of the longest value.

           .. versionadded:: 1.3.16

        :param schema: Schema name of this type. For types that exist on the
           target database as an independent schema construct (PostgreSQL),
           this parameter specifies the named schema in which the type is
           present.

           .. note::

              The ``schema`` of the :class:`.Enum` type does not
              by default make use of the ``schema`` established on the
              owning :class:`_schema.Table`.  If this behavior is desired,
              set the ``inherit_schema`` flag to ``True``.

        :param quote: Set explicit quoting preferences for the type's name.

        :param inherit_schema: When ``True``, the "schema" from the owning
           :class:`_schema.Table`
           will be copied to the "schema" attribute of this
           :class:`.Enum`, replacing whatever value was passed for the
           ``schema`` attribute.   This also takes effect when using the
           :meth:`_schema.Table.tometadata` operation.

        :param validate_strings: when True, string values that are being
           passed to the database in a SQL statement will be checked
           for validity against the list of enumerated values.  Unrecognized
           values will result in a ``LookupError`` being raised.

           .. versionadded:: 1.1.0b2

        :param values_callable: A callable which will be passed the PEP-435
           compliant enumerated type, which should then return a list of
           string values to be persisted.   This allows for alternate usages
           such as using the string value of an enum to be persisted to the
           database instead of its name.

           .. versionadded:: 1.2.3

        :param sort_key_function: a Python callable which may be used as the
           "key" argument in the Python ``sorted()`` built-in.   The
           SQLAlchemy ORM requires that primary key columns which are mapped
           must be sortable in some way.  When using an unsortable
           enumeration object such as a Python 3 ``Enum`` object, this
           parameter may be used to set a default sort key function for the
           objects.  By default, the database value of the enumeration is
           used as the sorting function.

           .. versionadded:: 1.3.8


        """
        self._enum_init(enums, kw)

    @property
    def _enums_argument(self):
        # Reconstruct the positional "enums" argument for re-adaptation:
        # either the original PEP-435 class or the raw string labels.
        if self.enum_class is not None:
            return [self.enum_class]
        else:
            return self.enums

    def _enum_init(self, enums, kw):
        """internal init for :class:`.Enum` and subclasses.

        friendly init helper used by subclasses to remove
        all the Enum-specific keyword arguments from kw.  Allows all
        other arguments in kw to pass through.

        """
        # NOTE: kw is consumed (popped) in place; leftover keys flow
        # through to SchemaType.__init__ at the bottom.
        self.native_enum = kw.pop("native_enum", True)
        self.create_constraint = kw.pop("create_constraint", True)
        self.values_callable = kw.pop("values_callable", None)
        self._sort_key_function = kw.pop("sort_key_function", NO_ARG)
        length_arg = kw.pop("length", NO_ARG)

        values, objects = self._parse_into_values(enums, kw)
        self._setup_for_values(values, objects, kw)

        convert_unicode = kw.pop("convert_unicode", None)
        self.validate_strings = kw.pop("validate_strings", False)

        if convert_unicode is None:
            for e in self.enums:
                # this is all py2k logic that can go away for py3k only,
                # "expect unicode" will always be implicitly true
                if isinstance(e, util.text_type):
                    _expect_unicode = True
                    break
            else:
                _expect_unicode = False
        else:
            _expect_unicode = convert_unicode

        # VARCHAR length defaults to the longest label; a user-supplied
        # "length" may only widen it and only applies to non-native enums.
        if self.enums:
            length = max(len(x) for x in self.enums)
        else:
            length = 0
        if not self.native_enum and length_arg is not NO_ARG:
            if length_arg < length:
                raise ValueError(
                    "When provided, length must be larger or equal"
                    " than the length of the longest enum value. %s < %s"
                    % (length_arg, length)
                )
            length = length_arg

        # None always passes through unchanged in both directions.
        self._valid_lookup[None] = self._object_lookup[None] = None

        super(Enum, self).__init__(
            length=length, _expect_unicode=_expect_unicode
        )

        if self.enum_class:
            kw.setdefault("name", self.enum_class.__name__.lower())
        SchemaType.__init__(
            self,
            name=kw.pop("name", None),
            schema=kw.pop("schema", None),
            metadata=kw.pop("metadata", None),
            inherit_schema=kw.pop("inherit_schema", False),
            quote=kw.pop("quote", None),
            _create_events=kw.pop("_create_events", True),
        )

    def _parse_into_values(self, enums, kw):
        # Returns (values, objects): the strings persisted to the DB and
        # the Python objects handed back to the application.  For plain
        # string enums the two sequences are the same object.
        if not enums and "_enums" in kw:
            enums = kw.pop("_enums")

        # A single argument exposing __members__ is treated as a PEP-435
        # enumerated class.
        if len(enums) == 1 and hasattr(enums[0], "__members__"):
            self.enum_class = enums[0]
            members = self.enum_class.__members__
            if self.values_callable:
                values = self.values_callable(self.enum_class)
            else:
                values = list(members)
            objects = [members[k] for k in members]
            return values, objects
        else:
            self.enum_class = None
            return enums, enums

    def _setup_for_values(self, values, objects, kw):
        # the "values" are the string labels persisted to the database;
        # "objects" are what the application sends/receives.
        self.enums = list(values)

        # reversed() so that when a value repeats (aliased enum members),
        # the *first* occurrence wins in the lookup.
        self._valid_lookup = dict(zip(reversed(objects), reversed(values)))

        self._object_lookup = dict(zip(values, objects))

        # also allow the database-side string itself as input on the
        # bind side, mapping it to its canonical persisted value.
        self._valid_lookup.update(
            [
                (value, self._valid_lookup[self._object_lookup[value]])
                for value in values
            ]
        )

    @property
    def sort_key_function(self):
        if self._sort_key_function is NO_ARG:
            # default: sort by the database string value of each element
            return self._db_value_for_elem
        else:
            return self._sort_key_function

    @property
    def native(self):
        return self.native_enum

    def _db_value_for_elem(self, elem):
        # Translate an application-side element to its database string.
        try:
            return self._valid_lookup[elem]
        except KeyError as err:
            # for unknown string values, we return as is.  While we can
            # validate these if we wanted, that does not allow for lesser-used
            # end-user use cases, such as using a LIKE comparison with an enum,
            # or for an application that wishes to apply string tests to an
            # ENUM (see [ticket:3725]).  While we can decide to differentiate
            # here between an INSERT statement and a criteria used in a SELECT,
            # for now we're staying conservative w/ behavioral changes (perhaps
            # someone has a trigger that handles strings on INSERT)
            if not self.validate_strings and isinstance(
                elem, compat.string_types
            ):
                return elem
            else:
                util.raise_(
                    LookupError(
                        '"%s" is not among the defined enum values' % elem
                    ),
                    replace_context=err,
                )

    class Comparator(String.Comparator):
        def _adapt_expression(self, op, other_comparator):
            op, typ = super(Enum.Comparator, self)._adapt_expression(
                op, other_comparator
            )
            if op is operators.concat_op:
                # string concatenation of an enum yields a plain String,
                # not another Enum
                typ = String(
                    self.type.length, _expect_unicode=self.type._expect_unicode
                )
            return op, typ

    comparator_factory = Comparator

    def _object_value_for_elem(self, elem):
        # Translate a database-side string back to the application-side
        # object; unlike _db_value_for_elem, unknown values always raise.
        try:
            return self._object_lookup[elem]
        except KeyError as err:
            util.raise_(
                LookupError(
                    '"%s" is not among the defined enum values' % elem
                ),
                replace_context=err,
            )

    def __repr__(self):
        return util.generic_repr(
            self,
            additional_kw=[("native_enum", True)],
            to_inspect=[Enum, SchemaType],
        )

    def adapt_to_emulated(self, impltype, **kw):
        # Carry over all Enum/SchemaType configuration when adapting to a
        # non-native (emulated) implementation; events are never recreated.
        kw.setdefault("_expect_unicode", self._expect_unicode)
        kw.setdefault("validate_strings", self.validate_strings)
        kw.setdefault("name", self.name)
        kw.setdefault("schema", self.schema)
        kw.setdefault("inherit_schema", self.inherit_schema)
        kw.setdefault("metadata", self.metadata)
        kw.setdefault("_create_events", False)
        kw.setdefault("native_enum", self.native_enum)
        kw.setdefault("values_callable", self.values_callable)
        kw.setdefault("create_constraint", self.create_constraint)
        kw.setdefault("length", self.length)
        assert "_enums" in kw
        return impltype(**kw)

    def adapt(self, impltype, **kw):
        # supply the original enum source so the adapted copy can rebuild
        # its lookup tables
        kw["_enums"] = self._enums_argument
        return super(Enum, self).adapt(impltype, **kw)

    def _should_create_constraint(self, compiler, **kw):
        # the CHECK constraint is only emitted for the active variant on
        # dialects without native enum support (or when native is disabled)
        if not self._is_impl_for_variant(compiler.dialect, kw):
            return False
        return (
            not self.native_enum or not compiler.dialect.supports_native_enum
        )

    @util.dependencies("sqlalchemy.sql.schema")
    def _set_table(self, schema, column, table):
        # first parameter "schema" is the sqlalchemy.sql.schema module,
        # injected by the util.dependencies decorator.
        SchemaType._set_table(self, column, table)

        if not self.create_constraint:
            return

        variant_mapping = self._variant_mapping_for_set_table(column)

        # constraint attaches itself to the table as a side effect of
        # construction; the _create_rule defers the decision to DDL time.
        e = schema.CheckConstraint(
            type_coerce(column, self).in_(self.enums),
            name=_defer_name(self.name),
            _create_rule=util.portable_instancemethod(
                self._should_create_constraint,
                {"variant_mapping": variant_mapping},
            ),
            _type_bound=True,
        )
        assert e.table is table

    def literal_processor(self, dialect):
        parent_processor = super(Enum, self).literal_processor(dialect)

        def process(value):
            value = self._db_value_for_elem(value)
            if parent_processor:
                value = parent_processor(value)
            return value

        return process

    def bind_processor(self, dialect):
        def process(value):
            value = self._db_value_for_elem(value)
            if parent_processor:
                value = parent_processor(value)
            return value

        # NOTE: parent_processor is assigned *after* process() is defined;
        # the closure resolves it at call time, so this is intentional.
        parent_processor = super(Enum, self).bind_processor(dialect)
        return process

    def result_processor(self, dialect, coltype):
        parent_processor = super(Enum, self).result_processor(dialect, coltype)

        def process(value):
            if parent_processor:
                value = parent_processor(value)

            value = self._object_value_for_elem(value)
            return value

        return process

    def copy(self, **kw):
        # bypass String/TypeEngine copy in favor of SchemaType's, which
        # re-establishes DDL events on the copy
        return SchemaType.copy(self, **kw)

    @property
    def python_type(self):
        if self.enum_class:
            return self.enum_class
        else:
            return super(Enum, self).python_type
class PickleType(TypeDecorator):
    """Holds Python objects, which are serialized using pickle.

    PickleType builds upon the Binary type to apply Python's
    ``pickle.dumps()`` to incoming objects, and ``pickle.loads()`` on
    the way out, allowing any pickleable Python object to be stored as
    a serialized binary field.

    To allow ORM change events to propagate for elements associated
    with :class:`.PickleType`, see :ref:`mutable_toplevel`.

    """

    impl = LargeBinary

    def __init__(
        self, protocol=pickle.HIGHEST_PROTOCOL, pickler=None, comparator=None
    ):
        """
        Construct a PickleType.

        :param protocol: defaults to ``pickle.HIGHEST_PROTOCOL``.

        :param pickler: defaults to cPickle.pickle or pickle.pickle if
          cPickle is not available.  May be any object with
          pickle-compatible ``dumps`` and ``loads`` methods.

        :param comparator: a 2-arg callable predicate used
          to compare values of this type.  If left as ``None``,
          the Python "equals" operator is used to compare values.

        """
        self.protocol = protocol
        self.pickler = pickler or pickle
        self.comparator = comparator
        super(PickleType, self).__init__()

    def __reduce__(self):
        # note that a custom pickler is not itself preserved when the
        # type object is pickled
        return PickleType, (self.protocol, None, self.comparator)

    def bind_processor(self, dialect):
        impl_processor = self.impl.bind_processor(dialect)
        dumps = self.pickler.dumps
        protocol = self.protocol

        def _serialize(value):
            # pickle non-None values; None passes through as SQL NULL
            if value is not None:
                value = dumps(value, protocol)
            return value

        if impl_processor is None:
            return _serialize

        def process(value):
            # chain into the underlying LargeBinary processor
            return impl_processor(_serialize(value))

        return process

    def result_processor(self, dialect, coltype):
        impl_processor = self.impl.result_processor(dialect, coltype)
        loads = self.pickler.loads

        def _deserialize(value):
            if value is None:
                return None
            return loads(value)

        if impl_processor is None:
            return _deserialize

        def process(value):
            return _deserialize(impl_processor(value))

        return process

    def compare_values(self, x, y):
        # a user-supplied comparator takes precedence over plain equality
        return self.comparator(x, y) if self.comparator else x == y
class Boolean(Emulated, TypeEngine, SchemaType):

    """A bool datatype.

    :class:`.Boolean` typically uses BOOLEAN or SMALLINT on the DDL side,
    and on the Python side deals in ``True`` or ``False``.

    The :class:`.Boolean` datatype currently has two levels of assertion
    that the values persisted are simple true/false values.  For all
    backends, only the Python values ``None``, ``True``, ``False``, ``1``
    or ``0`` are accepted as parameter values.  For those backends that
    don't support a "native boolean" datatype, a CHECK constraint is also
    created on the target column.  Production of the CHECK constraint
    can be disabled by passing the :paramref:`.Boolean.create_constraint`
    flag set to ``False``.

    .. versionchanged:: 1.2 the :class:`.Boolean` datatype now asserts that
       incoming Python values are already in pure boolean form.


    """

    __visit_name__ = "boolean"
    # Emulated flag: treated as native unless the dialect says otherwise.
    native = True

    def __init__(self, create_constraint=True, name=None, _create_events=True):
        """Construct a Boolean.

        :param create_constraint: defaults to True.  If the boolean
          is generated as an int/smallint, also create a CHECK constraint
          on the table that ensures 1 or 0 as a value.

        :param name: if a CHECK constraint is generated, specify
          the name of the constraint.

        """
        self.create_constraint = create_constraint
        self.name = name
        self._create_events = _create_events

    def _should_create_constraint(self, compiler, **kw):
        # Emit the CHECK constraint only when this type is the active
        # variant and the dialect lacks a native boolean but wants the
        # non-native check constraint.
        if not self._is_impl_for_variant(compiler.dialect, kw):
            return False
        return (
            not compiler.dialect.supports_native_boolean
            and compiler.dialect.non_native_boolean_check_constraint
        )

    @util.dependencies("sqlalchemy.sql.schema")
    def _set_table(self, schema, column, table):
        # first parameter "schema" is the sqlalchemy.sql.schema module,
        # injected by the util.dependencies decorator.  Attaches a
        # CHECK (col IN (0, 1)) constraint for non-native backends; the
        # _create_rule defers the decision to DDL compile time.
        if not self.create_constraint:
            return

        variant_mapping = self._variant_mapping_for_set_table(column)

        e = schema.CheckConstraint(
            type_coerce(column, self).in_([0, 1]),
            name=_defer_name(self.name),
            _create_rule=util.portable_instancemethod(
                self._should_create_constraint,
                {"variant_mapping": variant_mapping},
            ),
            _type_bound=True,
        )
        assert e.table is table

    @property
    def python_type(self):
        return bool

    # the only values accepted without raising
    _strict_bools = frozenset([None, True, False])

    def _strict_as_bool(self, value):
        # Reject anything that isn't None/True/False; plain ints get a
        # ValueError with a more specific message than other types.
        if value not in self._strict_bools:
            if not isinstance(value, int):
                raise TypeError("Not a boolean value: %r" % value)
            else:
                raise ValueError(
                    "Value %r is not None, True, or False" % value
                )
        return value

    def literal_processor(self, dialect):
        compiler = dialect.statement_compiler(dialect, None)
        # render the dialect's TRUE/FALSE literals once, up front
        true = compiler.visit_true(None)
        false = compiler.visit_false(None)

        def process(value):
            return true if self._strict_as_bool(value) else false

        return process

    def bind_processor(self, dialect):
        _strict_as_bool = self._strict_as_bool
        # native backends receive bool; others receive 0/1 integers
        if dialect.supports_native_boolean:
            _coerce = bool
        else:
            _coerce = int

        def process(value):
            value = _strict_as_bool(value)
            if value is not None:
                value = _coerce(value)
            return value

        return process

    def result_processor(self, dialect, coltype):
        if dialect.supports_native_boolean:
            return None
        else:
            return processors.int_to_boolean
class _AbstractInterval(_LookupExpressionAdapter, TypeEngine):
    """Shared expression-adaptation rules for INTERVAL-like types."""

    @util.memoized_property
    def _expression_adaptations(self):
        # Coercion rules modeled on the PostgreSQL date/time operator
        # table: http://www.postgresql.org/docs/current/\
        # static/functions-datetime.html.
        kls = self.__class__
        return {
            operators.add: {
                Date: DateTime,
                Interval: kls,
                DateTime: DateTime,
                Time: Time,
            },
            operators.sub: {Interval: kls},
            operators.mul: {Numeric: kls},
            operators.truediv: {Numeric: kls},
            operators.div: {Numeric: kls},
        }

    @property
    def _type_affinity(self):
        # every interval variant compares/adapts as the generic Interval
        return Interval

    def coerce_compared_value(self, op, value):
        """See :meth:`.TypeEngine.coerce_compared_value` for a description."""
        return self.impl.coerce_compared_value(op, value)
class Interval(Emulated, _AbstractInterval, TypeDecorator):

    """A type for ``datetime.timedelta()`` objects.

    The Interval type deals with ``datetime.timedelta`` objects.  In
    PostgreSQL, the native ``INTERVAL`` type is used; for others, the
    value is stored as a date which is relative to the "epoch"
    (Jan. 1, 1970).

    Note that the ``Interval`` type does not currently provide date arithmetic
    operations on platforms which do not support interval types natively. Such
    operations usually require transformation of both sides of the expression
    (such as, conversion of both sides into integer epoch values first) which
    currently is a manual procedure (such as via
    :attr:`~sqlalchemy.sql.expression.func`).

    """

    impl = DateTime
    # reference point for the emulated representation: timedeltas are
    # persisted as (epoch + value) DATETIME values
    epoch = dt.datetime.utcfromtimestamp(0)

    def __init__(self, native=True, second_precision=None, day_precision=None):
        """Construct an Interval object.

        :param native: when True, use the actual
          INTERVAL type provided by the database, if
          supported (currently PostgreSQL, Oracle).
          Otherwise, represent the interval data as
          an epoch value regardless.

        :param second_precision: For native interval types
          which support a "fractional seconds precision" parameter,
          i.e. Oracle and PostgreSQL

        :param day_precision: for native interval types which
          support a "day precision" parameter, i.e. Oracle.

        """
        super(Interval, self).__init__()
        self.native = native
        self.second_precision = second_precision
        self.day_precision = day_precision

    @property
    def python_type(self):
        return dt.timedelta

    def adapt_to_emulated(self, impltype, **kw):
        # the emulated form reuses the generic type adaptation rather
        # than TypeDecorator's
        return _AbstractInterval.adapt(self, impltype, **kw)

    def bind_processor(self, dialect):
        # timedelta -> epoch-relative datetime, chained into the DateTime
        # impl's own bind processor when one exists
        impl_processor = self.impl.bind_processor(dialect)
        epoch = self.epoch
        if impl_processor:

            def process(value):
                if value is not None:
                    value = epoch + value
                return impl_processor(value)

        else:

            def process(value):
                if value is not None:
                    value = epoch + value
                return value

        return process

    def result_processor(self, dialect, coltype):
        # inverse of bind_processor: datetime -> timedelta by epoch
        # subtraction
        impl_processor = self.impl.result_processor(dialect, coltype)
        epoch = self.epoch
        if impl_processor:

            def process(value):
                value = impl_processor(value)
                if value is None:
                    return None
                return value - epoch

        else:

            def process(value):
                if value is None:
                    return None
                return value - epoch

        return process
1960class JSON(Indexable, TypeEngine):
1961 """Represent a SQL JSON type.
1963 .. note:: :class:`_types.JSON`
1964 is provided as a facade for vendor-specific
1965 JSON types. Since it supports JSON SQL operations, it only
1966 works on backends that have an actual JSON type, currently:
1968 * PostgreSQL
1970 * MySQL as of version 5.7 (MariaDB as of the 10.2 series does not)
1972 * SQLite as of version 3.9
1974 :class:`_types.JSON` is part of the Core in support of the growing
1975 popularity of native JSON datatypes.
1977 The :class:`_types.JSON` type stores arbitrary JSON format data, e.g.::
1979 data_table = Table('data_table', metadata,
1980 Column('id', Integer, primary_key=True),
1981 Column('data', JSON)
1982 )
1984 with engine.connect() as conn:
1985 conn.execute(
1986 data_table.insert(),
1987 data = {"key1": "value1", "key2": "value2"}
1988 )
1990 **JSON-Specific Expression Operators**
1992 The :class:`_types.JSON`
1993 datatype provides these additional SQL operations:
1995 * Keyed index operations::
1997 data_table.c.data['some key']
1999 * Integer index operations::
2001 data_table.c.data[3]
2003 * Path index operations::
2005 data_table.c.data[('key_1', 'key_2', 5, ..., 'key_n')]
2007 * Data casters for specific JSON element types, subsequent to an index
2008 or path operation being invoked::
2010 data_table.c.data["some key"].as_integer()
2012 .. versionadded:: 1.3.11
2014 Additional operations may be available from the dialect-specific versions
2015 of :class:`_types.JSON`, such as :class:`_postgresql.JSON` and
2016 :class:`_postgresql.JSONB` which both offer additional PostgreSQL-specific
2017 operations.
2019 **Casting JSON Elements to Other Types**
2021 Index operations, i.e. those invoked by calling upon the expression using
2022 the Python bracket operator as in ``some_column['some key']``, return an
2023 expression object whose type defaults to :class:`_types.JSON`,
2024 so that
2025 further JSON-oriented instructions may be called upon the result type.
2026 However, it is likely more common that an index operation is expected
2027 to return a specific scalar element, such as a string or integer. In
2028 order to provide access to these elements in a backend-agnostic way,
2029 a series of data casters are provided:
2031 * :meth:`.JSON.Comparator.as_string` - return the element as a string
2033 * :meth:`.JSON.Comparator.as_boolean` - return the element as a boolean
2035 * :meth:`.JSON.Comparator.as_float` - return the element as a float
2037 * :meth:`.JSON.Comparator.as_integer` - return the element as an integer
2039 These data casters are implemented by supporting dialects in order to
2040 assure that comparisons to the above types will work as expected, such as::
2042 # integer comparison
2043 data_table.c.data["some_integer_key"].as_integer() == 5
2045 # boolean comparison
2046 data_table.c.data["some_boolean"].as_boolean() == True
2048 .. versionadded:: 1.3.11 Added type-specific casters for the basic JSON
2049 data element types.
2051 .. note::
2053 The data caster functions are new in version 1.3.11, and supersede
2054 the previous documented approaches of using CAST; for reference,
2055 this looked like::
2057 from sqlalchemy import cast, type_coerce
2058 from sqlalchemy import String, JSON
2059 cast(
2060 data_table.c.data['some_key'], String
2061 ) == type_coerce(55, JSON)
2063 The above case now works directly as::
2065 data_table.c.data['some_key'].as_integer() == 5
2067 For details on the previous comparison approach within the 1.3.x
2068 series, see the documentation for SQLAlchemy 1.2 or the included HTML
2069 files in the doc/ directory of the version's distribution.
2071 **Detecting Changes in JSON columns when using the ORM**
2073 The :class:`_types.JSON` type, when used with the SQLAlchemy ORM, does not
2074 detect in-place mutations to the structure. In order to detect these, the
2075 :mod:`sqlalchemy.ext.mutable` extension must be used. This extension will
2076 allow "in-place" changes to the datastructure to produce events which
2077 will be detected by the unit of work. See the example at :class:`.HSTORE`
2078 for a simple example involving a dictionary.
2080 **Support for JSON null vs. SQL NULL**
2082 When working with NULL values, the :class:`_types.JSON`
2083 type recommends the
2084 use of two specific constants in order to differentiate between a column
2085 that evaluates to SQL NULL, e.g. no value, vs. the JSON-encoded string
2086 of ``"null"``. To insert or select against a value that is SQL NULL,
2087 use the constant :func:`.null`::
2089 from sqlalchemy import null
2090 conn.execute(table.insert(), json_value=null())
2092 To insert or select against a value that is JSON ``"null"``, use the
2093 constant :attr:`_types.JSON.NULL`::
2095 conn.execute(table.insert(), json_value=JSON.NULL)
2097 The :class:`_types.JSON` type supports a flag
2098 :paramref:`_types.JSON.none_as_null` which when set to True will result
2099 in the Python constant ``None`` evaluating to the value of SQL
2100 NULL, and when set to False results in the Python constant
2101 ``None`` evaluating to the value of JSON ``"null"``. The Python
2102 value ``None`` may be used in conjunction with either
2103 :attr:`_types.JSON.NULL` and :func:`.null` in order to indicate NULL
2104 values, but care must be taken as to the value of the
2105 :paramref:`_types.JSON.none_as_null` in these cases.
2107 **Customizing the JSON Serializer**
2109 The JSON serializer and deserializer used by :class:`_types.JSON`
2110 defaults to
2111 Python's ``json.dumps`` and ``json.loads`` functions; in the case of the
2112 psycopg2 dialect, psycopg2 may be using its own custom loader function.
2114 In order to affect the serializer / deserializer, they are currently
2115 configurable at the :func:`_sa.create_engine` level via the
2116 :paramref:`_sa.create_engine.json_serializer` and
2117 :paramref:`_sa.create_engine.json_deserializer` parameters. For example,
2118 to turn off ``ensure_ascii``::
2120 engine = create_engine(
2121 "sqlite://",
2122 json_serializer=lambda obj: json.dumps(obj, ensure_ascii=False))
2124 .. versionchanged:: 1.3.7
2126 SQLite dialect's ``json_serializer`` and ``json_deserializer``
2127 parameters renamed from ``_json_serializer`` and
2128 ``_json_deserializer``.
2130 .. seealso::
2132 :class:`_postgresql.JSON`
2134 :class:`_postgresql.JSONB`
2136 :class:`.mysql.JSON`
2138 :class:`_sqlite.JSON`
2140 .. versionadded:: 1.1
2143 """
    __visit_name__ = "JSON"

    # JSON documents are mutable containers and cannot be reliably hashed
    hashable = False
    NULL = util.symbol("JSON_NULL")
    """Describe the json value of NULL.

    This value is used to force the JSON value of ``"null"`` to be
    used as the value.   A value of Python ``None`` will be recognized
    either as SQL NULL or JSON ``"null"``, based on the setting
    of the :paramref:`_types.JSON.none_as_null` flag; the
    :attr:`_types.JSON.NULL`
    constant can be used to always resolve to JSON ``"null"`` regardless
    of this setting.  This is in contrast to the :func:`_expression.null`
    construct,
    which always resolves to SQL NULL.  E.g.::

        from sqlalchemy import null
        from sqlalchemy.dialects.postgresql import JSON

        # will *always* insert SQL NULL
        obj1 = MyObject(json_value=null())

        # will *always* insert JSON string "null"
        obj2 = MyObject(json_value=JSON.NULL)

        session.add_all([obj1, obj2])
        session.commit()

    In order to set JSON NULL as a default value for a column, the most
    transparent method is to use :func:`_expression.text`::

        Table(
            'my_table', metadata,
            Column('json_data', JSON, default=text("'null'"))
        )

    While it is possible to use :attr:`_types.JSON.NULL` in this context, the
    :attr:`_types.JSON.NULL` value will be returned as the value of the
    column, which in the context of the ORM or other repurposing of the
    default value, may not be desirable.  Using a SQL expression means the
    value will be re-fetched from the database within the context of
    retrieving generated defaults.

    """
    def __init__(self, none_as_null=False):
        """Construct a :class:`_types.JSON` type.

        :param none_as_null=False: if True, persist the Python value
         ``None`` as a SQL NULL value, not the JSON encoding of ``null``.
         Note that when this flag is False, the :func:`.null` construct
         can still be used to persist a NULL value::

            from sqlalchemy import null
            conn.execute(table.insert(), data=null())

         .. note::

              :paramref:`_types.JSON.none_as_null` does **not** apply to the
              values passed to :paramref:`_schema.Column.default` and
              :paramref:`_schema.Column.server_default`; a value of ``None``
              passed for these parameters means "no default present".

         .. seealso::

              :attr:`.types.JSON.NULL`

        """
        self.none_as_null = none_as_null
2217 class JSONElementType(TypeEngine):
2218 """common function for index / path elements in a JSON expression."""
2220 _integer = Integer()
2221 _string = String()
2223 def string_bind_processor(self, dialect):
2224 return self._string._cached_bind_processor(dialect)
2226 def string_literal_processor(self, dialect):
2227 return self._string._cached_literal_processor(dialect)
2229 def bind_processor(self, dialect):
2230 int_processor = self._integer._cached_bind_processor(dialect)
2231 string_processor = self.string_bind_processor(dialect)
2233 def process(value):
2234 if int_processor and isinstance(value, int):
2235 value = int_processor(value)
2236 elif string_processor and isinstance(value, util.string_types):
2237 value = string_processor(value)
2238 return value
2240 return process
2242 def literal_processor(self, dialect):
2243 int_processor = self._integer._cached_literal_processor(dialect)
2244 string_processor = self.string_literal_processor(dialect)
2246 def process(value):
2247 if int_processor and isinstance(value, int):
2248 value = int_processor(value)
2249 elif string_processor and isinstance(value, util.string_types):
2250 value = string_processor(value)
2251 return value
2253 return process
    class JSONIndexType(JSONElementType):
        """Placeholder for the datatype of a JSON index value.

        This allows execution-time processing of JSON index values
        for special syntaxes, e.g. dialects that must quote or cast
        a single index element differently from ordinary bind values.

        """
    class JSONPathType(JSONElementType):
        """Placeholder type for JSON path operations.

        This allows execution-time processing of a path-based
        index value (e.g. a tuple of keys) into a specific SQL syntax.

        """
2271 class Comparator(Indexable.Comparator, Concatenable.Comparator):
2272 """Define comparison operations for :class:`_types.JSON`."""
2274 @util.dependencies("sqlalchemy.sql.default_comparator")
2275 def _setup_getitem(self, default_comparator, index):
2276 if not isinstance(index, util.string_types) and isinstance(
2277 index, compat.collections_abc.Sequence
2278 ):
2279 index = default_comparator._check_literal(
2280 self.expr,
2281 operators.json_path_getitem_op,
2282 index,
2283 bindparam_type=JSON.JSONPathType,
2284 )
2286 operator = operators.json_path_getitem_op
2287 else:
2288 index = default_comparator._check_literal(
2289 self.expr,
2290 operators.json_getitem_op,
2291 index,
2292 bindparam_type=JSON.JSONIndexType,
2293 )
2294 operator = operators.json_getitem_op
2296 return operator, index, self.type
2298 def as_boolean(self):
2299 """Cast an indexed value as boolean.
2301 e.g.::
2303 stmt = select([
2304 mytable.c.json_column['some_data'].as_boolean()
2305 ]).where(
2306 mytable.c.json_column['some_data'].as_boolean() == True
2307 )
2309 .. versionadded:: 1.3.11
2311 """
2312 return self._binary_w_type(Boolean(), "as_boolean")
2314 def as_string(self):
2315 """Cast an indexed value as string.
2317 e.g.::
2319 stmt = select([
2320 mytable.c.json_column['some_data'].as_string()
2321 ]).where(
2322 mytable.c.json_column['some_data'].as_string() ==
2323 'some string'
2324 )
2326 .. versionadded:: 1.3.11
2328 """
2329 return self._binary_w_type(String(), "as_string")
2331 def as_integer(self):
2332 """Cast an indexed value as integer.
2334 e.g.::
2336 stmt = select([
2337 mytable.c.json_column['some_data'].as_integer()
2338 ]).where(
2339 mytable.c.json_column['some_data'].as_integer() == 5
2340 )
2342 .. versionadded:: 1.3.11
2344 """
2345 return self._binary_w_type(Integer(), "as_integer")
2347 def as_float(self):
2348 """Cast an indexed value as float.
2350 e.g.::
2352 stmt = select([
2353 mytable.c.json_column['some_data'].as_float()
2354 ]).where(
2355 mytable.c.json_column['some_data'].as_float() == 29.75
2356 )
2358 .. versionadded:: 1.3.11
2360 """
2361 # note there's no Numeric or Decimal support here yet
2362 return self._binary_w_type(Float(), "as_float")
2364 def as_json(self):
2365 """Cast an indexed value as JSON.
2367 This is the default behavior of indexed elements in any case.
2369 Note that comparison of full JSON structures may not be
2370 supported by all backends.
2372 .. versionadded:: 1.3.11
2374 """
2375 return self.expr
2377 def _binary_w_type(self, typ, method_name):
2378 if not isinstance(
2379 self.expr, elements.BinaryExpression
2380 ) or self.expr.operator not in (
2381 operators.json_getitem_op,
2382 operators.json_path_getitem_op,
2383 ):
2384 raise exc.InvalidRequestError(
2385 "The JSON cast operator JSON.%s() only works with a JSON "
2386 "index expression e.g. col['q'].%s()"
2387 % (method_name, method_name)
2388 )
2389 expr = self.expr._clone()
2390 expr.type = typ
2391 return expr
2393 comparator_factory = Comparator
    @property
    def python_type(self):
        # top-level JSON documents are represented as Python dicts
        return dict
    @property
    def should_evaluate_none(self):
        """Inverse of :attr:`_types.JSON.none_as_null`.

        True indicates that the Python value ``None`` should be evaluated
        (persisted as JSON ``"null"``) rather than rendered as SQL NULL.
        """
        return not self.none_as_null

    @should_evaluate_none.setter
    def should_evaluate_none(self, value):
        # the two flags are logical inverses; keep them in sync
        self.none_as_null = not value
    @util.memoized_property
    def _str_impl(self):
        # String type used to bind/result-process the serialized JSON
        # text; memoized so it is constructed once per JSON instance
        return String(_expect_unicode=True)
2412 def bind_processor(self, dialect):
2413 string_process = self._str_impl.bind_processor(dialect)
2415 json_serializer = dialect._json_serializer or json.dumps
2417 def process(value):
2418 if value is self.NULL:
2419 value = None
2420 elif isinstance(value, elements.Null) or (
2421 value is None and self.none_as_null
2422 ):
2423 return None
2425 serialized = json_serializer(value)
2426 if string_process:
2427 serialized = string_process(serialized)
2428 return serialized
2430 return process
2432 def result_processor(self, dialect, coltype):
2433 string_process = self._str_impl.result_processor(dialect, coltype)
2434 json_deserializer = dialect._json_deserializer or json.loads
2436 def process(value):
2437 if value is None:
2438 return None
2439 if string_process:
2440 value = string_process(value)
2441 return json_deserializer(value)
2443 return process
class ARRAY(SchemaEventTarget, Indexable, Concatenable, TypeEngine):
    """Represent a SQL Array type.

    .. note::  This type serves as the basis for all ARRAY operations.
       However, currently **only the PostgreSQL backend has support
       for SQL arrays in SQLAlchemy**.  It is recommended to use the
       :class:`_postgresql.ARRAY` type directly when using ARRAY types
       with PostgreSQL, as it provides additional operators specific
       to that backend.

    :class:`_types.ARRAY` is part of the Core in support of various SQL
    standard functions such as :class:`_functions.array_agg`
    which explicitly involve
    arrays; however, with the exception of the PostgreSQL backend and possibly
    some third-party dialects, no other SQLAlchemy built-in dialect has support
    for this type.

    An :class:`_types.ARRAY` type is constructed given the "type"
    of element::

        mytable = Table("mytable", metadata,
                Column("data", ARRAY(Integer))
            )

    The above type represents an N-dimensional array,
    meaning a supporting backend such as PostgreSQL will interpret values
    with any number of dimensions automatically.   To produce an INSERT
    construct that passes in a 1-dimensional array of integers::

        connection.execute(
            mytable.insert(),
            data=[1,2,3]
        )

    The :class:`_types.ARRAY` type can be constructed given a fixed number
    of dimensions::

        mytable = Table("mytable", metadata,
                Column("data", ARRAY(Integer, dimensions=2))
            )

    Sending a number of dimensions is optional, but recommended if the
    datatype is to represent arrays of more than one dimension.  This number
    is used:

    * When emitting the type declaration itself to the database, e.g.
      ``INTEGER[][]``

    * When translating Python values to database values, and vice versa, e.g.
      an ARRAY of :class:`.Unicode` objects uses this number to efficiently
      access the string values inside of array structures without resorting
      to per-row type inspection

    * When used with the Python ``getitem`` accessor, the number of dimensions
      serves to define the kind of type that the ``[]`` operator should
      return, e.g. for an ARRAY of INTEGER with two dimensions::

          >>> expr = table.c.column[5]  # returns ARRAY(Integer, dimensions=1)
          >>> expr = expr[6]  # returns Integer

    For 1-dimensional arrays, an :class:`_types.ARRAY` instance with no
    dimension parameter will generally assume single-dimensional behaviors.

    SQL expressions of type :class:`_types.ARRAY` have support for "index" and
    "slice" behavior.  The Python ``[]`` operator works normally here, given
    integer indexes or slices.  Arrays default to 1-based indexing.
    The operator produces binary expression
    constructs which will produce the appropriate SQL, both for
    SELECT statements::

        select([mytable.c.data[5], mytable.c.data[2:7]])

    as well as UPDATE statements when the :meth:`_expression.Update.values`
    method is used::

        mytable.update().values({
            mytable.c.data[5]: 7,
            mytable.c.data[2:7]: [1, 2, 3]
        })

    The :class:`_types.ARRAY` type also provides for the operators
    :meth:`.types.ARRAY.Comparator.any` and
    :meth:`.types.ARRAY.Comparator.all`.  The PostgreSQL-specific version of
    :class:`_types.ARRAY` also provides additional operators.

    .. versionadded:: 1.1.0

    .. seealso::

        :class:`_postgresql.ARRAY`

    """

    __visit_name__ = "ARRAY"

    zero_indexes = False
    """if True, Python zero-based indexes should be interpreted as one-based
    on the SQL expression side."""

    class Comparator(Indexable.Comparator, Concatenable.Comparator):

        """Define comparison operations for :class:`_types.ARRAY`.

        More operators are available on the dialect-specific form
        of this type.  See :class:`.postgresql.ARRAY.Comparator`.

        """

        def _setup_getitem(self, index):
            if isinstance(index, slice):
                # slicing an array yields an array of the same type
                return_type = self.type
                if self.type.zero_indexes:
                    # NOTE(review): assumes start/stop are not None when
                    # zero_indexes is set — an open-ended slice would raise
                    # TypeError here; confirm against callers
                    index = slice(index.start + 1, index.stop + 1, index.step)
                # coerce start/stop/step into INTEGER bind parameters
                index = Slice(
                    _literal_as_binds(
                        index.start,
                        name=self.expr.key,
                        type_=type_api.INTEGERTYPE,
                    ),
                    _literal_as_binds(
                        index.stop,
                        name=self.expr.key,
                        type_=type_api.INTEGERTYPE,
                    ),
                    _literal_as_binds(
                        index.step,
                        name=self.expr.key,
                        type_=type_api.INTEGERTYPE,
                    ),
                )
            else:
                if self.type.zero_indexes:
                    # translate Python 0-based index to SQL 1-based
                    index += 1
                if self.type.dimensions is None or self.type.dimensions == 1:
                    # last (or only) dimension: produce the element type
                    return_type = self.type.item_type
                else:
                    # intermediate dimension: produce an ARRAY with one
                    # fewer dimension
                    adapt_kw = {"dimensions": self.type.dimensions - 1}
                    return_type = self.type.adapt(
                        self.type.__class__, **adapt_kw
                    )

            return operators.getitem, index, return_type

        def contains(self, *arg, **kw):
            # only dialect-specific ARRAY subclasses implement containment
            raise NotImplementedError(
                "ARRAY.contains() not implemented for the base "
                "ARRAY type; please use the dialect-specific ARRAY type"
            )

        @util.dependencies("sqlalchemy.sql.elements")
        def any(self, elements, other, operator=None):
            """Return ``other operator ANY (array)`` clause.

            Argument places are switched, because ANY requires array
            expression to be on the right hand-side.

            E.g.::

                from sqlalchemy.sql import operators

                conn.execute(
                    select([table.c.data]).where(
                            table.c.data.any(7, operator=operators.lt)
                        )
                )

            :param other: expression to be compared
            :param operator: an operator object from the
             :mod:`sqlalchemy.sql.operators`
             package, defaults to :func:`.operators.eq`.

            .. seealso::

                :func:`_expression.any_`

                :meth:`.types.ARRAY.Comparator.all`

            """
            operator = operator if operator else operators.eq

            # send plain BinaryExpression so that negate remains at None,
            # leading to NOT expr for negation.
            return operator(
                elements._literal_as_binds(other),
                elements.CollectionAggregate._create_any(self.expr),
            )

        @util.dependencies("sqlalchemy.sql.elements")
        def all(self, elements, other, operator=None):
            """Return ``other operator ALL (array)`` clause.

            Argument places are switched, because ALL requires array
            expression to be on the right hand-side.

            E.g.::

                from sqlalchemy.sql import operators

                conn.execute(
                    select([table.c.data]).where(
                            table.c.data.all(7, operator=operators.lt)
                        )
                )

            :param other: expression to be compared
            :param operator: an operator object from the
             :mod:`sqlalchemy.sql.operators`
             package, defaults to :func:`.operators.eq`.

            .. seealso::

                :func:`_expression.all_`

                :meth:`.types.ARRAY.Comparator.any`

            """
            operator = operator if operator else operators.eq

            return operator(
                elements._literal_as_binds(other),
                elements.CollectionAggregate._create_all(self.expr),
            )

    comparator_factory = Comparator

    def __init__(
        self, item_type, as_tuple=False, dimensions=None, zero_indexes=False
    ):
        """Construct an :class:`_types.ARRAY`.

        E.g.::

          Column('myarray', ARRAY(Integer))

        Arguments are:

        :param item_type: The data type of items of this array. Note that
          dimensionality is irrelevant here, so multi-dimensional arrays like
          ``INTEGER[][]``, are constructed as ``ARRAY(Integer)``, not as
          ``ARRAY(ARRAY(Integer))`` or such.

        :param as_tuple=False: Specify whether return results
          should be converted to tuples from lists. This parameter is
          not generally needed as a Python list corresponds well
          to a SQL array.

        :param dimensions: if non-None, the ARRAY will assume a fixed
         number of dimensions.  This impacts how the array is declared
         on the database, how it goes about interpreting Python and
         result values, as well as how expression behavior in conjunction
         with the "getitem" operator works.  See the description at
         :class:`_types.ARRAY` for additional detail.

        :param zero_indexes=False: when True, index values will be converted
         between Python zero-based and SQL one-based indexes, e.g.
         a value of one will be added to all index values before passing
         to the database.

        """
        if isinstance(item_type, ARRAY):
            raise ValueError(
                "Do not nest ARRAY types; ARRAY(basetype) "
                "handles multi-dimensional arrays of basetype"
            )
        if isinstance(item_type, type):
            # accept a type class as well as an instance
            item_type = item_type()
        self.item_type = item_type
        self.as_tuple = as_tuple
        self.dimensions = dimensions
        self.zero_indexes = zero_indexes

    @property
    def hashable(self):
        # lists are unhashable; only tuple-converted results can be hashed
        return self.as_tuple

    @property
    def python_type(self):
        return list

    def compare_values(self, x, y):
        return x == y

    def _set_parent(self, column):
        """Support SchemaEventTarget"""

        # propagate the parent-set event to the item type, e.g. for
        # types such as Enum that need to attach to the table
        if isinstance(self.item_type, SchemaEventTarget):
            self.item_type._set_parent(column)

    def _set_parent_with_dispatch(self, parent):
        """Support SchemaEventTarget"""

        super(ARRAY, self)._set_parent_with_dispatch(parent)

        if isinstance(self.item_type, SchemaEventTarget):
            self.item_type._set_parent_with_dispatch(parent)
class REAL(Float):

    """The SQL REAL type, identified to the compiler as ``REAL``."""

    __visit_name__ = "REAL"
class FLOAT(Float):

    """The SQL FLOAT type, identified to the compiler as ``FLOAT``."""

    __visit_name__ = "FLOAT"
class NUMERIC(Numeric):

    """The SQL NUMERIC type, identified to the compiler as ``NUMERIC``."""

    __visit_name__ = "NUMERIC"
class DECIMAL(Numeric):

    """The SQL DECIMAL type, identified to the compiler as ``DECIMAL``."""

    __visit_name__ = "DECIMAL"
class INTEGER(Integer):

    """The SQL INT or INTEGER type, identified to the compiler as
    ``INTEGER``."""

    __visit_name__ = "INTEGER"
# SQL INT and INTEGER are synonymous; expose both names
INT = INTEGER
class SMALLINT(SmallInteger):

    """The SQL SMALLINT type, identified to the compiler as ``SMALLINT``."""

    __visit_name__ = "SMALLINT"
class BIGINT(BigInteger):

    """The SQL BIGINT type, identified to the compiler as ``BIGINT``."""

    __visit_name__ = "BIGINT"
class TIMESTAMP(DateTime):

    """The SQL TIMESTAMP type.

    :class:`_types.TIMESTAMP` datatypes have support for timezone
    storage on some backends, such as PostgreSQL and Oracle.  Use the
    :paramref:`~types.TIMESTAMP.timezone` argument in order to enable
    "TIMESTAMP WITH TIMEZONE" for these backends.

    """

    __visit_name__ = "TIMESTAMP"

    def __init__(self, timezone=False):
        """Construct a new :class:`_types.TIMESTAMP`.

        :param timezone: boolean.  Indicates that the TIMESTAMP type should
         enable timezone support, if available on the target database.
         On a per-dialect basis is similar to "TIMESTAMP WITH TIMEZONE".
         If the target database does not support timezones, this flag is
         ignored.

        """
        super(TIMESTAMP, self).__init__(timezone=timezone)

    def get_dbapi_type(self, dbapi):
        # return the DBAPI module's own TIMESTAMP type marker
        return dbapi.TIMESTAMP
class DATETIME(DateTime):

    """The SQL DATETIME type, identified to the compiler as ``DATETIME``."""

    __visit_name__ = "DATETIME"
class DATE(Date):

    """The SQL DATE type, identified to the compiler as ``DATE``."""

    __visit_name__ = "DATE"
class TIME(Time):

    """The SQL TIME type, identified to the compiler as ``TIME``."""

    __visit_name__ = "TIME"
class TEXT(Text):

    """The SQL TEXT type, identified to the compiler as ``TEXT``."""

    __visit_name__ = "TEXT"
class CLOB(Text):

    """The CLOB type.

    This type is found in Oracle and Informix.
    """

    __visit_name__ = "CLOB"
class VARCHAR(String):

    """The SQL VARCHAR type, identified to the compiler as ``VARCHAR``."""

    __visit_name__ = "VARCHAR"
class NVARCHAR(Unicode):

    """The SQL NVARCHAR type, identified to the compiler as ``NVARCHAR``."""

    __visit_name__ = "NVARCHAR"
class CHAR(String):

    """The SQL CHAR type, identified to the compiler as ``CHAR``."""

    __visit_name__ = "CHAR"
class NCHAR(Unicode):

    """The SQL NCHAR type, identified to the compiler as ``NCHAR``."""

    __visit_name__ = "NCHAR"
class BLOB(LargeBinary):

    """The SQL BLOB type, identified to the compiler as ``BLOB``."""

    __visit_name__ = "BLOB"
class BINARY(_Binary):

    """The SQL BINARY type, identified to the compiler as ``BINARY``."""

    __visit_name__ = "BINARY"
class VARBINARY(_Binary):

    """The SQL VARBINARY type, identified to the compiler as ``VARBINARY``."""

    __visit_name__ = "VARBINARY"
class BOOLEAN(Boolean):

    """The SQL BOOLEAN type, identified to the compiler as ``BOOLEAN``."""

    __visit_name__ = "BOOLEAN"
class NullType(TypeEngine):

    """An unknown type.

    :class:`.NullType` is used as a default type for those cases where
    a type cannot be determined, including:

    * During table reflection, when the type of a column is not recognized
      by the :class:`.Dialect`
    * When constructing SQL expressions using plain Python objects of
      unknown types (e.g. ``somecolumn == my_special_object``)
    * When a new :class:`_schema.Column` is created, and the given type is
      passed as ``None`` or is not passed at all.

    The :class:`.NullType` can be used within SQL expression invocation
    without issue, it just has no behavior either at the expression
    construction level or at the bind-parameter/result processing level.
    :class:`.NullType` will result in a :exc:`.CompileError` if the compiler
    is asked to render the type itself, such as if it is used in a
    :func:`.cast` operation or within a schema creation operation such as
    that invoked by :meth:`_schema.MetaData.create_all` or the
    :class:`.CreateTable` construct.

    """

    __visit_name__ = "null"

    _isnull = True

    hashable = False

    def literal_processor(self, dialect):
        # with no type information there is no value repertoire; any
        # literal simply renders as NULL
        def process(value):
            return "NULL"

        return process

    class Comparator(TypeEngine.Comparator):
        def _adapt_expression(self, op, other_comparator):
            # For a commutative operator against a typed operand, defer
            # to the other side's comparator to determine the expression
            # type; otherwise the expression stays typeless.
            if operators.is_commutative(op) and not isinstance(
                other_comparator, NullType.Comparator
            ):
                return other_comparator._adapt_expression(op, self)
            return op, self.expr.type

    comparator_factory = Comparator
class MatchType(Boolean):
    """Refers to the return type of the MATCH operator.

    As the :meth:`.ColumnOperators.match` is probably the most open-ended
    operator in generic SQLAlchemy Core, we can't assume the return type
    at SQL evaluation time, as MySQL returns a floating point, not a boolean,
    and other backends might do something different.    So this type
    acts as a placeholder, currently subclassing :class:`.Boolean`.
    The type allows dialects to inject result-processing functionality
    if needed, and on MySQL will return floating-point values.

    .. versionadded:: 1.0.0

    """
# module-level singleton instances for commonly-referenced types;
# these are shared throughout the expression system
NULLTYPE = NullType()
BOOLEANTYPE = Boolean()
STRINGTYPE = String()
INTEGERTYPE = Integer()
MATCHTYPE = MatchType()
# mapping of plain Python types to default SQL type instances, consulted
# by _resolve_value_to_type() when coercing literal Python values
_type_map = {
    int: Integer(),
    float: Float(),
    bool: BOOLEANTYPE,
    decimal.Decimal: Numeric(),
    dt.date: Date(),
    dt.datetime: DateTime(),
    dt.time: Time(),
    dt.timedelta: Interval(),
    util.NoneType: NULLTYPE,
}

if util.py3k:
    # Python 3: str is unicode text, bytes is the binary type
    _type_map[bytes] = LargeBinary()  # noqa
    _type_map[str] = Unicode()
else:
    # Python 2: unicode vs. bytestring str
    _type_map[unicode] = Unicode()  # noqa
    _type_map[str] = String()

# local alias saves an attribute lookup per call in _resolve_value_to_type
_type_map_get = _type_map.get
def _resolve_value_to_type(value):
    """Return a :class:`.TypeEngine` for a plain Python *value*.

    Looks up the value's type in ``_type_map``; unknown types fall back
    to ``NULLTYPE``, except that SQLAlchemy's own inspectable constructs
    are rejected with :class:`.ArgumentError`.
    """
    _result_type = _type_map_get(type(value), False)
    if _result_type is not False:
        return _result_type

    # use inspect() to detect SQLAlchemy built-in
    # objects.
    insp = inspection.inspect(value, False)
    if (
        insp is not None
        and
        # foil mock.Mock() and other impostors by ensuring
        # the inspection target itself self-inspects
        insp.__class__ in inspection._registrars
    ):
        raise exc.ArgumentError(
            "Object %r is not legal as a SQL literal value" % value
        )
    return NULLTYPE
# back-assign to type_api: these members are referenced through the
# type_api module elsewhere but are defined here — NOTE(review):
# presumably this indirection avoids a circular import between the two
# modules; confirm before restructuring
type_api.BOOLEANTYPE = BOOLEANTYPE
type_api.STRINGTYPE = STRINGTYPE
type_api.INTEGERTYPE = INTEGERTYPE
type_api.NULLTYPE = NULLTYPE
type_api.MATCHTYPE = MATCHTYPE
type_api.INDEXABLE = Indexable
type_api._resolve_value_to_type = _resolve_value_to_type
TypeEngine.Comparator.BOOLEANTYPE = BOOLEANTYPE